@@ -1,3967 +1,3966 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import itertools
import os
import re
import tempfile

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    dagop,
    dirstateguard,
    encoding,
    error,
    formatter,
    graphmod,
    match as matchmod,
    mdiff,
    obsolete,
    patch,
    pathutil,
    pycompat,
    registrar,
    revlog,
    revset,
    scmutil,
    smartset,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
            originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")

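# Illustrative note (added here for clarity, not part of the original module):
# given a command table key such as "^commit|ci", parsealiases() strips the
# "^" priority marker and splits on "|", so parsealiases("^commit|ci") returns
# ["commit", "ci"]; callers such as findpossible() below treat the first entry
# as the canonical command name.
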
def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages that indicate to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

class dirnode(object):
    """
    Represent a directory in the user's working copy with information required
    for the purpose of tersing its status.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary of sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not a direct child of this directory, we traverse to
        the directory of which this file is a direct child and add the file
        there.
        """

        # if the filename contains a path separator, it is not a direct
        # child of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exist
            if subdir not in self.subdirs:
                subdirpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

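    # Example for illustration only (not in the original source): calling
    # addfile('docs/api/index.txt', 'm') on the root dirnode creates a 'docs'
    # dirnode, which in turn creates a 'docs/api' dirnode holding the entry
    # ('index.txt', 'm'); the status character 'm' is recorded in
    # self.statuses at every level on the way down.
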
    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, os.path.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to
        terse that status. -> yield (status, dirpath)

        2) Otherwise, we do the following:

                a) Yield (status, filepath) for all the files which are in this
                directory (only the ones in this directory, not the subdirs)

                b) Recurse the function on all the subdirectories of this
                directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath

def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is the string which is passed by the user as the argument to the
    `--terse` flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist

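# A worked example with a hypothetical working copy (illustration only, not
# taken from the original source): if the only entries in statuslist are the
# unknown files 'docs/a.txt' and 'docs/b.txt' and terseargs is 'u', every file
# under 'docs' shares the status 'u', so the 'u' bucket of the returned list
# contains just 'docs' followed by the platform path separator (e.g. 'docs/')
# instead of the two individual file paths.  The return value is always seven
# sorted lists, one per status character in 'marduic' order.
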
def _commentlines(raw):
    '''Surround lines with a comment char and a new line'''
    lines = raw.splitlines()
    commentedlines = ['# %s' % line for line in lines]
    return '\n'.join(commentedlines) + '\n'

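# For example (illustration only), _commentlines('To continue: hg rebase
# --continue') returns '# To continue: hg rebase --continue\n', and a
# two-line input yields two '# '-prefixed lines; the helpers below rely on
# this to emit the commented hint blocks used in morestatus output.
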
def _conflictsmsg(repo):
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)

def _helpmessage(continuecmd, abortcmd):
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)

def _rebasemsg():
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _updatecleanmsg(dest=None):
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)

def _graftmsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())

def _mergemsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())

def _bisectmsg():
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)

def fileexistspredicate(filename):
    return lambda repo: repo.vfs.exists(filename)

def _mergepredicate(repo):
    return len(repo[None].parents()) > 1

STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # It needs to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)

def _getrepostate(repo):
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)

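# Illustration (not part of the original file): because STATES is ordered,
# _getrepostate() reports only the first matching state.  If a repository has
# both a 'rebasestate' file and an uncommitted merge in progress, the returned
# tuple describes 'rebase', since that entry appears before 'merge' in STATES;
# 'merge' is reported only when no more specific unfinished state matches.
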
def morestatus(repo, fm):
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds

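# Example of the matching rules above (hypothetical table, for illustration):
# with a command table containing only the key '^log|history',
# findpossible('hist', table) finds no exact alias, falls back to prefix
# matching, and returns ({'history': (['log', 'history'], table entry)},
# ['log', 'history']); with strict=True the prefix step is skipped and the
# choice dict comes back empty.
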
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

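# Illustration with made-up paths: if '/home/alice/repo/.hg' exists, then
# findrepo('/home/alice/repo/src/deep') walks upwards one directory at a time
# and returns '/home/alice/repo'; if no ancestor contains a '.hg' directory,
# the walk stops when dirname() no longer changes the path (the filesystem
# root) and None is returned.
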
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message

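# Sketch of the option handling above (hypothetical values): with
# opts={'message': '', 'logfile': 'msg.txt'} the message is read from
# 'msg.txt' (or from stdin when the logfile name means standard input), while
# passing both --message and --logfile aborts; with neither option set, the
# unset message is returned unchanged and the caller decides whether to
# launch an editor.
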
def mergeeditform(ctxorbool, baseformname):
|
|
760
|
def mergeeditform(ctxorbool, baseformname):
|
|
761
|
"""return appropriate editform name (referencing a committemplate)
|
|
761
|
"""return appropriate editform name (referencing a committemplate)
|
|
762
|
|
|
762
|
|
|
763
|
'ctxorbool' is either a ctx to be committed, or a bool indicating whether
|
|
763
|
'ctxorbool' is either a ctx to be committed, or a bool indicating whether
|
|
764
|
merging is committed.
|
|
764
|
merging is committed.
|
|
765
|
|
|
765
|
|
|
766
|
This returns baseformname with '.merge' appended if it is a merge,
|
|
766
|
This returns baseformname with '.merge' appended if it is a merge,
|
|
767
|
otherwise '.normal' is appended.
|
|
767
|
otherwise '.normal' is appended.
|
|
768
|
"""
|
|
768
|
"""
|
|
769
|
if isinstance(ctxorbool, bool):
|
|
769
|
if isinstance(ctxorbool, bool):
|
|
770
|
if ctxorbool:
|
|
770
|
if ctxorbool:
|
|
771
|
return baseformname + ".merge"
|
|
771
|
return baseformname + ".merge"
|
|
772
|
elif 1 < len(ctxorbool.parents()):
|
|
772
|
elif 1 < len(ctxorbool.parents()):
|
|
773
|
return baseformname + ".merge"
|
|
773
|
return baseformname + ".merge"
|
|
774
|
|
|
774
|
|
|
775
|
return baseformname + ".normal"
|
|
775
|
return baseformname + ".normal"
|
|
776
|
|
|
776
|
|
|
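# Illustrative sketch (not part of the original module): with the bool form,
# mergeeditform only decides between the '.merge' and '.normal' suffixes, so
# its behaviour can be shown without a repository. The helper name below is
# invented for this demonstration.
def _mergeeditform_example():
    # True stands for "a merge is being committed"
    assert mergeeditform(True, 'commit') == 'commit.merge'
    assert mergeeditform(False, 'commit') == 'commit.normal'
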
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return the actual text to be
    stored into history. This allows the description to be changed before
    it is stored.

    'extramsg' is an extra message to be shown in the editor instead of the
    'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
    are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor

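# Illustrative sketch (not part of the original module): how the dispatch in
# getcommiteditor resolves for a few argument combinations. 'commiteditor'
# and 'commitforceeditor' are defined elsewhere in this module; the helper
# name below is invented for this demonstration.
def _getcommiteditor_example():
    # no --edit, no finishdesc/extramsg, no editform: the plain editor is used
    assert getcommiteditor() is commiteditor
    # --edit (or finishdesc/extramsg) forces the interactive editor wrapper
    assert getcommiteditor(edit=True) is not commiteditor
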
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', desc or '')
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])

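# Illustrative sketch (not part of the original module): the '%' specs handled
# by makefilename. Only specs that need neither a repo nor a node are used
# here, so None is passed for both. The helper name is invented for this
# demonstration.
def _makefilename_example():
    name = makefilename(None, 'export-%n-of-%N%%-%s', None,
                        total=12, seqno=3, pathname='foo/bar.txt')
    # '%n' is zero-padded to the width of '%N', '%%' is a literal '%',
    # and '%s' is the basename of the supplied pathname.
    assert name == 'export-03-of-12%-bar.txt'
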
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'

class _unclosablefile(object):
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        pass

def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)

def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r

def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0

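# Hedged usage sketch (not part of the original module): both 'hg copy' and
# 'hg rename' funnel into copy() above while the working-directory lock is
# held; rename=True additionally unlinks the source and records a rename.
# The helper name below is invented for this demonstration.
def _rename_example(ui, repo, pats, opts):
    with repo.wlock():
        return copy(ui, repo, pats, opts, rename=True)
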
## facility to let extension process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}

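# Hedged illustration (hypothetical extension code, not part of this module):
# an extension defines a 'preimport' hook with the signature documented above
# and registers it so that it runs before the changeset created by 'hg import'
# is committed, e.g.
#   extrapreimport.append('myext')
#   extrapreimportmap['myext'] = _preimport_hook_example
# The 'myext' identifier and the extra key below are invented names.
def _preimport_hook_example(repo, patchdata, extra, opts):
    # copy the original node id from the patch header into the changeset
    # extras when the header carried one
    if patchdata.get('nodeid'):
        extra['myext_origin'] = patchdata['nodeid']
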
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of options passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)

# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function is to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}

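# Hedged illustration (hypothetical, not part of this module): an export
# header hook receives (sequencenumber, changectx) and returns either a
# string to emit as an extra '# ...' header line or None to add nothing.
# Registration would look like:
#   extraexport.append('series')
#   extraexportmap['series'] = _export_header_example
# The 'series' identifier and the header text are invented for this sketch.
def _export_header_example(seqno, ctx):
    # label each exported patch with its position in the series
    return 'Series position %d' % seqno
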
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)

def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] # Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()

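# Hedged usage sketch (not part of the original module): writing one patch
# file per revision, named after the short hash via the default template.
# 'repo' is assumed to be an open localrepository and 'revs' a list of
# revision numbers; the helper name is invented for this demonstration.
def _export_example(repo, revs):
    export(repo, revs, fntemplate='hg-%h.patch')
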
1493
|
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
|
|
1493
|
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
|
|
1494
|
changes=None, stat=False, fp=None, prefix='',
|
|
1494
|
changes=None, stat=False, fp=None, prefix='',
|
|
1495
|
root='', listsubrepos=False, hunksfilterfn=None):
|
|
1495
|
root='', listsubrepos=False, hunksfilterfn=None):
|
|
1496
|
'''show diff or diffstat.'''
|
|
1496
|
'''show diff or diffstat.'''
|
|
1497
|
if fp is None:
|
|
1497
|
if fp is None:
|
|
1498
|
write = ui.write
|
|
1498
|
write = ui.write
|
|
1499
|
else:
|
|
1499
|
else:
|
|
1500
|
def write(s, **kw):
|
|
1500
|
def write(s, **kw):
|
|
1501
|
fp.write(s)
|
|
1501
|
fp.write(s)
|
|
1502
|
|
|
1502
|
|
|
1503
|
if root:
|
|
1503
|
if root:
|
|
1504
|
relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
|
|
1504
|
relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
|
|
1505
|
else:
|
|
1505
|
else:
|
|
1506
|
relroot = ''
|
|
1506
|
relroot = ''
|
|
1507
|
if relroot != '':
|
|
1507
|
if relroot != '':
|
|
1508
|
# XXX relative roots currently don't work if the root is within a
|
|
1508
|
# XXX relative roots currently don't work if the root is within a
|
|
1509
|
# subrepo
|
|
1509
|
# subrepo
|
|
1510
|
uirelroot = match.uipath(relroot)
|
|
1510
|
uirelroot = match.uipath(relroot)
|
|
1511
|
relroot += '/'
|
|
1511
|
relroot += '/'
|
|
1512
|
for matchroot in match.files():
|
|
1512
|
for matchroot in match.files():
|
|
1513
|
if not matchroot.startswith(relroot):
|
|
1513
|
if not matchroot.startswith(relroot):
|
|
1514
|
ui.warn(_('warning: %s not inside relative root %s\n') % (
|
|
1514
|
ui.warn(_('warning: %s not inside relative root %s\n') % (
|
|
1515
|
match.uipath(matchroot), uirelroot))
|
|
1515
|
match.uipath(matchroot), uirelroot))
|
|
1516
|
|
|
1516
|
|
|
1517
|
if stat:
|
|
1517
|
if stat:
|
|
1518
|
diffopts = diffopts.copy(context=0, noprefix=False)
|
|
1518
|
diffopts = diffopts.copy(context=0, noprefix=False)
|
|
1519
|
width = 80
|
|
1519
|
width = 80
|
|
1520
|
if not ui.plain():
|
|
1520
|
if not ui.plain():
|
|
1521
|
width = ui.termwidth()
|
|
1521
|
width = ui.termwidth()
|
|
1522
|
chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
|
|
1522
|
chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
|
|
1523
|
prefix=prefix, relroot=relroot,
|
|
1523
|
prefix=prefix, relroot=relroot,
|
|
1524
|
hunksfilterfn=hunksfilterfn)
|
|
1524
|
hunksfilterfn=hunksfilterfn)
|
|
1525
|
for chunk, label in patch.diffstatui(util.iterlines(chunks),
|
|
1525
|
for chunk, label in patch.diffstatui(util.iterlines(chunks),
|
|
1526
|
width=width):
|
|
1526
|
width=width):
|
|
1527
|
write(chunk, label=label)
|
|
1527
|
write(chunk, label=label)
|
|
1528
|
else:
|
|
1528
|
else:
|
|
1529
|
for chunk, label in patch.diffui(repo, node1, node2, match,
|
|
1529
|
for chunk, label in patch.diffui(repo, node1, node2, match,
|
|
1530
|
changes, opts=diffopts, prefix=prefix,
|
|
1530
|
changes, opts=diffopts, prefix=prefix,
|
|
1531
|
relroot=relroot,
|
|
1531
|
relroot=relroot,
|
|
1532
|
hunksfilterfn=hunksfilterfn):
|
|
1532
|
hunksfilterfn=hunksfilterfn):
|
|
1533
|
write(chunk, label=label)
|
|
1533
|
write(chunk, label=label)
|
|
1534
|
|
|
1534
|
|
|
1535
|
if listsubrepos:
|
|
1535
|
if listsubrepos:
|
|
1536
|
ctx1 = repo[node1]
|
|
1536
|
ctx1 = repo[node1]
|
|
1537
|
ctx2 = repo[node2]
|
|
1537
|
ctx2 = repo[node2]
|
|
1538
|
for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
|
|
1538
|
for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
|
|
1539
|
tempnode2 = node2
|
|
1539
|
tempnode2 = node2
|
|
1540
|
try:
|
|
1540
|
try:
|
|
1541
|
if node2 is not None:
|
|
1541
|
if node2 is not None:
|
|
1542
|
tempnode2 = ctx2.substate[subpath][1]
|
|
1542
|
tempnode2 = ctx2.substate[subpath][1]
|
|
1543
|
except KeyError:
|
|
1543
|
except KeyError:
|
|
1544
|
# A subrepo that existed in node1 was deleted between node1 and
|
|
1544
|
# A subrepo that existed in node1 was deleted between node1 and
|
|
1545
|
# node2 (inclusive). Thus, ctx2's substate won't contain that
|
|
1545
|
# node2 (inclusive). Thus, ctx2's substate won't contain that
|
|
1546
|
# subpath. The best we can do is to ignore it.
|
|
1546
|
# subpath. The best we can do is to ignore it.
|
|
1547
|
tempnode2 = None
|
|
1547
|
tempnode2 = None
|
|
1548
|
submatch = matchmod.subdirmatcher(subpath, match)
|
|
1548
|
submatch = matchmod.subdirmatcher(subpath, match)
|
|
1549
|
sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
|
|
1549
|
sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
|
|
1550
|
stat=stat, fp=fp, prefix=prefix)
|
|
1550
|
stat=stat, fp=fp, prefix=prefix)
|
|
1551
|
|
|
1551
|
|
|
1552
|
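# Illustrative sketch (hypothetical helper, not part of the real module): how a
# caller might drive diffordiffstat() to print a diffstat of a changeset
# against its first parent, mirroring what showpatch() does further below.
def _example_diffstat(ui, repo, rev):
    ctx = repo[rev]
    diffopts = patch.diffallopts(ui)
    diffordiffstat(ui, repo, diffopts, ctx.p1().node(), ctx.node(),
                   match=scmutil.matchall(repo), stat=True)
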
def _changesetlabels(ctx):
    labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append('changeset.obsolete')
    if ctx.isunstable():
        labels.append('changeset.unstable')
        for instability in ctx.instabilities():
            labels.append('instability.%s' % instability)
    return ' '.join(labels)

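# For example, _changesetlabels() returns
# 'log.changeset changeset.draft changeset.unstable instability.orphan'
# for a draft changeset whose only instability is "orphan".
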
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
             **props):
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, hunksfilterfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, hunksfilterfn, props)

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
                          label='log.node')
            return

        columns = self._columns
        self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(columns['branch'] % branch, label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            mrev = self.repo.manifestlog._revlog.rev(mnode)
            self.ui.write(columns['manifest']
                          % scmutil.formatrevnode(self.ui, mrev, mnode),
                          label='ui.debug log.manifest')
        self.ui.write(columns['user'] % ctx.user(), label='log.user')
        self.ui.write(columns['date'] % util.datestr(ctx.date()),
                      label='log.date')

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(columns['instability'] % ', '.join(instabilities),
                          label='log.instability')

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip(['files', 'files+', 'files-'], files):
                if value:
                    self.ui.write(columns[key] % " ".join(value),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(columns['files'] % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(columns['copies'] % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(columns['extra'] % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                self.ui.write(columns['summary'] % description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

    def _showobsfate(self, ctx):
        obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(self._columns['obsolete'] % obsfateline,
                              label='log.obsfate')

    def _exthook(self, ctx):
        '''empty method used by extensions as a hook point
        '''

    def showpatch(self, ctx, matchfn, hunksfilterfn=None):
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True,
                               hunksfilterfn=hunksfilterfn)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False,
                               hunksfilterfn=hunksfilterfn)
            self.ui.write("\n")

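# Minimal usage sketch (hypothetical, not part of the module): buffered
# displayers render each revision into a per-revision hunk via show() and emit
# it with flush(), which lets graph/log code reorder output; close() writes any
# footer.
def _example_plain_log(ui, repo, revs):
    displayer = changeset_printer(ui, repo, None, {}, buffered=True)
    for rev in revs:
        ctx = repo[rev]
        displayer.show(ctx)
        displayer.flush(ctx)
    displayer.close()
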
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        self._first = True

    def close(self):
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")

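# jsonchangeset is the displayer show_changeset() below picks for
# --template json (e.g. "hg log -Tjson"): each _show() call appends one object
# to a JSON list and close() terminates the list.
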
class changeset_templater(changeset_printer):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changeset_templater for common cases. See functions such as:
    makelogtemplater, show_changeset, buildcommittemplate, or other
    functions that use changeset_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        tres = formatter.templateresources(ui, repo)
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         defaults=templatekw.keywords,
                                         resources=tres,
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = tres['cache']  # shared with _graphnodeformatter()

        self._tref = tmplspec.ref
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props['ctx'] = ctx
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))

def logtemplatespec(tmpl, mapfile):
    if mapfile:
        return formatter.templatespec('changeset', tmpl, mapfile)
    else:
        return formatter.templatespec('', tmpl, None)

def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """

    # ui settings
    if not tmpl and not style: # templates are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        else:
            style = util.expandpath(ui.config('ui', 'style'))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    if not tmpl:
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)

def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'
    byte-string."""
    spec = logtemplatespec(tmpl, None)
    return changeset_templater(ui, repo, spec, buffered=buffered)

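# Minimal sketch (hypothetical): makelogtemplater() builds a displayer from a
# literal template string, e.g. one line per revision.
def _example_oneline_log(ui, repo, revs):
    displayer = makelogtemplater(ui, repo, '{rev}:{node|short} {desc|firstline}\n')
    for rev in revs:
        displayer.show(repo[rev])
    displayer.close()
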
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    match = None
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if not spec.ref and not spec.tmpl and not spec.mapfile:
        return changeset_printer(ui, repo, match, opts, buffered)

    return changeset_templater(ui, repo, spec, match, opts, buffered)

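# Minimal sketch (hypothetical): the usual caller pattern for show_changeset()
# -- build a displayer from the command options, feed it changesets, close it.
def _example_show(ui, repo, revs, opts):
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])
    displayer.close()
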
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug functions."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')

def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))

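# Minimal sketch (hypothetical): finddate() resolves a date spec (the same
# syntax "hg log -d" accepts, e.g. '<2017-10-01') to a revision-number string.
def _example_ctx_for_date(ui, repo, datespec):
    return repo[finddate(ui, repo, datespec)]
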
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2

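# The generator above yields 8, 16, 32, ... and then repeats the size limit:
#
#   sizes = increasingwindows()
#   [next(sizes) for _ in range(8)]  ==>  [8, 16, 32, 64, 128, 256, 512, 512]
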
class FileWalkError(Exception):
    pass

def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
            # XXX insert 1327 fix here
            if flparentlinkrevs:
                ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted

class _followfilter(object):
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False

def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
|
|
2297
|
for f in ctx.files():
|
|
2298
|
for f in ctx.files():
|
|
2298
|
if match(f):
|
|
2299
|
if match(f):
|
|
2299
|
yield f
|
|
2300
|
yield f
|
|
2300
|
fns = fns_generator()
|
|
2301
|
fns = fns_generator()
|
|
2301
|
prepare(ctx, fns)
|
|
2302
|
prepare(ctx, fns)
|
|
2302
|
for rev in nrevs:
|
|
2303
|
for rev in nrevs:
|
|
2303
|
yield change(rev)
|
|
2304
|
yield change(rev)
|
|
2304
|
|
|
2305
|
|
|
2305
|
if stopiteration:
|
|
2306
|
if stopiteration:
|
|
2306
|
break
|
|
2307
|
break
|
|
2307
|
|
|
2308
|
|
|
2308
|
return iterate()
|
|
2309
|
return iterate()
|
|
2309
|
|
|
2310
|
|
|
2310
|
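# Editor's note -- illustrative sketch, not part of the original module. The
# generator returned by iterate() above is meant to be driven by a caller
# that supplies a "prepare" callback; the parameter names used below (repo,
# match, opts, prepare) are assumed from the closure variables referenced
# above, not quoted from this diff:
#
#     def prepare(ctx, fns):
#         # called once per wanted revision with the matching file names
#         pass
#
#     for ctx in walkchangerevs(repo, match, opts, prepare):
#         # revisions are re-yielded window by window after prepare() ran
#         displayer.show(ctx)
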
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

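# Editor's sketch (hypothetical values, not original code): the factory above
# is consulted once per displayed revision, roughly as
#
#     filematcher = _makefollowlogfilematcher(repo, ['dir/file.c'], False)
#     m = filematcher(rev)   # match object over the names FILE had at "rev"
#
# The first call performs the lazy populate() walk; subsequent calls are a
# plain fcache dictionary lookup.
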
def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges':        ('not merge()', None),
        'only_merges':      ('merge()', None),
        '_ancestors':       ('ancestors(%(val)s)', None),
        '_fancestors':      ('_firstancestors(%(val)s)', None),
        '_descendants':     ('descendants(%(val)s)', None),
        '_fdescendants':    ('_firstdescendants(%(val)s)', None),
        '_matchfiles':      ('_matchfiles(%(val)s)', None),
        'date':             ('date(%(val)r)', None),
        'branch':           ('branch(%(val)r)', ' or '),
        '_patslog':         ('filelog(%(val)r)', ' or '),
        '_patsfollow':      ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword':          ('keyword(%(val)r)', ' or '),
        'prune':            ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user':             ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

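# Editor's note (hypothetical option values, for illustration only): for a
# command line such as "hg log -k bug -u alice --no-merges" the loop above
# would build a revset string roughly of the form
#
#     "((keyword('bug')) and not merge() and (user('alice')))"
#
# one clause per recognized option, list-valued options joined with their
# per-option operator (' or ' / ' and ') and the clauses joined with ' and '.
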
def _logrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs

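# Editor's note (summary of the branches above, not original code):
#     explicit --rev            -> scmutil.revrange() of the given specs
#     --follow on a null parent -> empty baseset (nothing to follow)
#     --follow otherwise        -> repo.revs('reverse(:.)')
#     no --rev, no --follow     -> full spanset, reversed (tip down to 0)
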
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

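# Editor's sketch (assumed calling convention, not original code): a log-like
# command would typically consume the triple returned above as
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     for rev in revs:
#         ctx = repo[rev]
#         matchfn = filematcher(rev) if filematcher else None
#         displayer.show(ctx, matchfn=matchfn)
#
# with "displayer" being a changeset printer built elsewhere in this module.
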
def _parselinerangelogopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).
    """
    linerangebyfname = []
    for pat in opts.get('line_range', []):
        try:
            pat, linerange = pat.rsplit(',', 1)
        except ValueError:
            raise error.Abort(_('malformatted line-range pattern %s') % pat)
        try:
            fromline, toline = map(int, linerange.split(':'))
        except ValueError:
            raise error.Abort(_("invalid line range for %s") % pat)
        msg = _("line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        linerangebyfname.append(
            (fname, util.processlinerange(fromline, toline)))
    return linerangebyfname

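# Editor's note (hypothetical example value): "--line-range foo.py,5:12" is
# split on the last ',' into a file pattern and a "from:to" pair, so the
# function above would return
#
#     [('foo.py', util.processlinerange(5, 12))]
#
# assuming 'foo.py' matches exactly one tracked file.
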
def getloglinerangerevs(repo, userrevs, opts):
    """Return (revs, filematcher, hunksfilter).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "filematcher(rev) -> match" is a factory function returning a match object
    for a given revision for file patterns specified in --line-range option.
    If neither --stat nor --patch options are passed, "filematcher" is None.

    "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
    returning a hunks filtering function.
    If neither --stat nor --patch options are passed, "filterhunks" is None.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(_('cannot follow file not in parent '
                                'revision: "%s"') % fname)
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(
                rev, {}).setdefault(
                    fctx.path(), []).append(linerange)

    filematcher = None
    hunksfilter = None
    if opts.get('patch') or opts.get('stat'):

        def nofilterhunksfn(fctx, hunks):
            return hunks

        def hunksfilter(rev):
            fctxlineranges = linerangesbyrev.get(rev)
            if fctxlineranges is None:
                return nofilterhunksfn

            def filterfn(fctx, hunks):
                lineranges = fctxlineranges.get(fctx.path())
                if lineranges is not None:
                    for hr, lines in hunks:
                        if hr is None: # binary
                            yield hr, lines
                            continue
                        if any(mdiff.hunkinrange(hr[2:], lr)
                               for lr in lineranges):
                            yield hr, lines
                else:
                    for hunk in hunks:
                        yield hunk

            return filterfn

        def filematcher(rev):
            files = list(linerangesbyrev.get(rev, []))
            return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    return revs, filematcher, hunksfilter

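# Editor's sketch (assumption about the caller, not original code): the three
# values returned above are meant to be used together, e.g.
#
#     revs, filematcher, hunksfilter = getloglinerangerevs(repo, userrevs, opts)
#     for rev in revs:
#         ctx = repo[rev]
#         # filematcher(rev) limits the diff to the --line-range files;
#         # hunksfilter(rev) then drops hunks outside the followed ranges.
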
def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    tres = formatter.templateresources(ui)
    if isinstance(displayer, changeset_templater):
        tres['cache'] = displayer.cache  # reuse cache of slow templates
    # The two revisions compared in this diff differ only in this hunk.  The
    # older side read:
    #     templ = formatter.maketemplater(ui, spec, resources=tres)
    #     props = templatekw.keywords.copy()
    #     def formatnode(repo, ctx):
    #         props['ctx'] = ctx
    #         props['repo'] = repo
    #         props['revcache'] = {}
    #         return templ.render(props)
    # The newer side, kept below, passes the keywords as template defaults and
    # builds a fresh "props" mapping per call:
    templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords,
                                    resources=tres)
    def formatnode(repo, ctx):
        props = {'ctx': ctx, 'repo': repo, 'revcache': {}}
        return templ.render(props)
    return formatnode

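# Editor's note (hypothetical configuration, for illustration only): the
# template looked up above comes from a setting such as
#
#     [ui]
#     graphnodetemplate = {ifcontains(rev, revset('.'), '@', graphnode)}
#
# which would draw '@' for the working-directory parent and the default
# {graphnode} symbol everywhere else.
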
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None, props=None):
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        edges = edgefn(type, char, state, rev, parents)
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(ctx, copies=copies, matchfn=revmatchfn,
                       _graphwidth=width, **pycompat.strkwargs(props))
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            lines = []
    displayer.close()

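# Editor's note (illustrative configuration, values are examples only): the
# experimental knobs read above correspond to settings like
#
#     [experimental]
#     graphstyle.parent = |
#     graphstyle.grandparent = :
#     graphstyle.missing =
#     graphshorten = true
#
# and are not consulted at all when HGPLAIN is in effect (ui.plain('graph')),
# which forces every edge back to the pre-3.8 '|'.
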
def graphlog(ui, repo, pats, opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

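# Editor's sketch (assumed caller shape, not original code): "fm" above is a
# formatter created by the command that invokes this helper, roughly
#
#     fm = ui.formatter('files', opts)
#     fmt = '%s\n'
#     ret = files(ui, ctx, m, fm, fmt, subrepos)
#     fm.end()
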
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
|
|
2996
|
for warning in warnings:
|
|
2998
|
ui.warn(warning)
|
|
2997
|
ui.warn(warning)
|
|
2999
|
|
|
2998
|
|
|
3000
|
return ret
|
|
2999
|
return ret
|
|
3001
|
|
|
3000
|
|
|
3002
|
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
|
|
3001
|
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
|
|
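    '''write the contents of files in ctx selected by matcher to the
    formatter basefm, or to per-file outputs generated from fntemplate when
    one is given; descends into matching subrepositories and returns 0 when
    at least one file was written, 1 otherwise'''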
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard:
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))

        return commitfunc(ui, repo, message, matcher, opts)

def samefile(f, ctx1, ctx2):
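    '''return True if file f has identical contents and flags in ctx1 and
    ctx2, or is absent from both manifests'''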
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

def amend(ui, repo, old, extra, pats, opts):
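    '''rewrite changeset old, folding in the working directory changes that
    match pats/opts, and return the node of the replacement changeset;
    old.node() is returned unchanged when the amend would be a no-op'''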
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = util.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
            subs, commitsubs, newsubstate = subrepo.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepo.writestate(repo, newsubstate)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and modified in the
        # amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid

def commiteditor(repo, ctx, subs, editform=''):
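    '''return ctx's existing description if it has one, otherwise run the
    commit editor (with unchanged-message detection) to obtain one'''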
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
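    '''open the user's editor seeded with a commit message template for ctx
    (built from [committemplate] configuration when available) and return
    the edited text; aborts if the message is empty or, when
    unchangedmessagedetection is set, left unmodified'''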
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

def buildcommittemplate(repo, ctx, subs, extramsg, ref):
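    '''render the [committemplate] entry named by ref for ctx, passing
    extramsg through to the template, and return the resulting text'''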
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    t = changeset_templater(ui, repo, spec, None, {}, False)
    t.t.cache.update((k, templater.unquotestring(v))
                     for k, v in repo.ui.configitems('committemplate'))

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
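    '''prefix each non-empty line of msg with "HG: " so it is stripped from
    the edited commit message later on'''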
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

def buildcommittext(repo, ctx, subs, extramsg):
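    '''return the default commit editor text for ctx: its current
    description followed by "HG:"-prefixed helper lines listing user,
    branch, bookmark, subrepos and changed files'''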
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
        edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

def commitstatus(repo, node, branch, bheads=None, opts=None):
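    '''print post-commit status messages for node: "created new head",
    reopening of a closed branch head, and the committed changeset id in
    verbose or debug mode'''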
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non-head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
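    '''return the working directory status limited to the files matched by
    pats and opts'''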
    return repo.status(match=scmutil.match(repo[None], pats, opts))

def revert(ui, repo, ctx, parents, *pats, **opts):
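    '''revert files in the working directory to their state in ctx

    Every matched file is classified against both the target revision and
    the dirstate, then dispatched to the appropriate action (revert, add,
    remove, forget, undelete, ...), creating .orig backups according to the
    selected backup strategy.'''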
3435
|
opts = pycompat.byteskwargs(opts)
|
|
3434
|
opts = pycompat.byteskwargs(opts)
|
|
3436
|
parent, p2 = parents
|
|
3435
|
parent, p2 = parents
|
|
3437
|
node = ctx.node()
|
|
3436
|
node = ctx.node()
|
|
3438
|
|
|
3437
|
|
|
3439
|
mf = ctx.manifest()
|
|
3438
|
mf = ctx.manifest()
|
|
3440
|
if node == p2:
|
|
3439
|
if node == p2:
|
|
3441
|
parent = p2
|
|
3440
|
parent = p2
|
|
3442
|
|
|
3441
|
|
|
3443
|
# need all matching names in dirstate and manifest of target rev,
|
|
3442
|
# need all matching names in dirstate and manifest of target rev,
|
|
3444
|
# so have to walk both. do not print errors if files exist in one
|
|
3443
|
# so have to walk both. do not print errors if files exist in one
|
|
3445
|
# but not other. in both cases, filesets should be evaluated against
|
|
3444
|
# but not other. in both cases, filesets should be evaluated against
|
|
3446
|
# workingctx to get consistent result (issue4497). this means 'set:**'
|
|
3445
|
# workingctx to get consistent result (issue4497). this means 'set:**'
|
|
3447
|
# cannot be used to select missing files from target rev.
|
|
3446
|
# cannot be used to select missing files from target rev.
|
|
3448
|
|
|
3447
|
|
|
3449
|
# `names` is a mapping for all elements in working copy and target revision
|
|
3448
|
# `names` is a mapping for all elements in working copy and target revision
|
|
3450
|
# The mapping is in the form:
|
|
3449
|
# The mapping is in the form:
|
|
3451
|
# <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
|
|
3450
|
# <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
|
|
3452
|
names = {}
|
|
3451
|
names = {}
|
|
3453
|
|
|
3452
|
|
|
3454
|
with repo.wlock():
|
|
3453
|
with repo.wlock():
|
|
3455
|
## filling of the `names` mapping
|
|
3454
|
## filling of the `names` mapping
|
|
3456
|
# walk dirstate to fill `names`
|
|
3455
|
# walk dirstate to fill `names`
|
|
3457
|
|
|
3456
|
|
|
3458
|
interactive = opts.get('interactive', False)
|
|
3457
|
interactive = opts.get('interactive', False)
|
|
3459
|
wctx = repo[None]
|
|
3458
|
wctx = repo[None]
|
|
3460
|
m = scmutil.match(wctx, pats, opts)
|
|
3459
|
m = scmutil.match(wctx, pats, opts)
|
|
3461
|
|
|
3460
|
|
|
3462
|
# we'll need this later
|
|
3461
|
# we'll need this later
|
|
3463
|
targetsubs = sorted(s for s in wctx.substate if m(s))
|
|
3462
|
targetsubs = sorted(s for s in wctx.substate if m(s))
|
|
3464
|
|
|
3463
|
|
|
3465
|
if not m.always():
|
|
3464
|
if not m.always():
|
|
3466
|
matcher = matchmod.badmatch(m, lambda x, y: False)
|
|
3465
|
matcher = matchmod.badmatch(m, lambda x, y: False)
|
|
3467
|
for abs in wctx.walk(matcher):
|
|
3466
|
for abs in wctx.walk(matcher):
|
|
3468
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3467
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3469
|
|
|
3468
|
|
|
3470
|
# walk target manifest to fill `names`
|
|
3469
|
# walk target manifest to fill `names`
|
|
3471
|
|
|
3470
|
|
|
3472
|
def badfn(path, msg):
|
|
3471
|
def badfn(path, msg):
|
|
3473
|
if path in names:
|
|
3472
|
if path in names:
|
|
3474
|
return
|
|
3473
|
return
|
|
3475
|
if path in ctx.substate:
|
|
3474
|
if path in ctx.substate:
|
|
3476
|
return
|
|
3475
|
return
|
|
3477
|
path_ = path + '/'
|
|
3476
|
path_ = path + '/'
|
|
3478
|
for f in names:
|
|
3477
|
for f in names:
|
|
3479
|
if f.startswith(path_):
|
|
3478
|
if f.startswith(path_):
|
|
3480
|
return
|
|
3479
|
return
|
|
3481
|
ui.warn("%s: %s\n" % (m.rel(path), msg))
|
|
3480
|
ui.warn("%s: %s\n" % (m.rel(path), msg))
|
|
3482
|
|
|
3481
|
|
|
3483
|
for abs in ctx.walk(matchmod.badmatch(m, badfn)):
|
|
3482
|
for abs in ctx.walk(matchmod.badmatch(m, badfn)):
|
|
3484
|
if abs not in names:
|
|
3483
|
if abs not in names:
|
|
3485
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3484
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3486
|
|
|
3485
|
|
|
3487
|
# Find status of all file in `names`.
|
|
3486
|
# Find status of all file in `names`.
|
|
3488
|
m = scmutil.matchfiles(repo, names)
|
|
3487
|
m = scmutil.matchfiles(repo, names)
|
|
3489
|
|
|
3488
|
|
|
3490
|
changes = repo.status(node1=node, match=m,
|
|
3489
|
changes = repo.status(node1=node, match=m,
|
|
3491
|
unknown=True, ignored=True, clean=True)
|
|
3490
|
unknown=True, ignored=True, clean=True)
|
|
3492
|
else:
|
|
3491
|
else:
|
|
3493
|
changes = repo.status(node1=node, match=m)
|
|
3492
|
changes = repo.status(node1=node, match=m)
|
|
3494
|
for kind in changes:
|
|
3493
|
for kind in changes:
|
|
3495
|
for abs in kind:
|
|
3494
|
for abs in kind:
|
|
3496
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3495
|
names[abs] = m.rel(abs), m.exact(abs)
|
|
3497
|
|
|
3496
|
|
|
3498
|
m = scmutil.matchfiles(repo, names)
|
|
3497
|
m = scmutil.matchfiles(repo, names)
|
|
3499
|
|
|
3498
|
|
|
3500
|
modified = set(changes.modified)
|
|
3499
|
modified = set(changes.modified)
|
|
3501
|
added = set(changes.added)
|
|
3500
|
added = set(changes.added)
|
|
3502
|
removed = set(changes.removed)
|
|
3501
|
removed = set(changes.removed)
|
|
3503
|
_deleted = set(changes.deleted)
|
|
3502
|
_deleted = set(changes.deleted)
|
|
3504
|
unknown = set(changes.unknown)
|
|
3503
|
unknown = set(changes.unknown)
|
|
3505
|
unknown.update(changes.ignored)
|
|
3504
|
unknown.update(changes.ignored)
|
|
3506
|
clean = set(changes.clean)
|
|
3505
|
clean = set(changes.clean)
|
|
3507
|
modadded = set()
|
|
3506
|
modadded = set()
|
|
3508
|
|
|
3507
|
|
|
3509
|
# We need to account for the state of the file in the dirstate,
|
|
3508
|
# We need to account for the state of the file in the dirstate,
|
|
3510
|
# even when we revert against something else than parent. This will
|
|
3509
|
# even when we revert against something else than parent. This will
|
|
3511
|
# slightly alter the behavior of revert (doing back up or not, delete
|
|
3510
|
# slightly alter the behavior of revert (doing back up or not, delete
|
|
3512
|
# or just forget etc).
|
|
3511
|
# or just forget etc).
|
|
3513
|
if parent == node:
|
|
3512
|
if parent == node:
|
|
3514
|
dsmodified = modified
|
|
3513
|
dsmodified = modified
|
|
3515
|
dsadded = added
|
|
3514
|
dsadded = added
|
|
3516
|
dsremoved = removed
|
|
3515
|
dsremoved = removed
|
|
3517
|
# store all local modifications, useful later for rename detection
|
|
3516
|
# store all local modifications, useful later for rename detection
|
|
3518
|
localchanges = dsmodified | dsadded
|
|
3517
|
localchanges = dsmodified | dsadded
|
|
3519
|
modified, added, removed = set(), set(), set()
|
|
3518
|
modified, added, removed = set(), set(), set()
|
|
3520
|
else:
|
|
3519
|
else:
|
|
3521
|
changes = repo.status(node1=parent, match=m)
|
|
3520
|
changes = repo.status(node1=parent, match=m)
|
|
3522
|
dsmodified = set(changes.modified)
|
|
3521
|
dsmodified = set(changes.modified)
|
|
3523
|
dsadded = set(changes.added)
|
|
3522
|
dsadded = set(changes.added)
|
|
3524
|
dsremoved = set(changes.removed)
|
|
3523
|
dsremoved = set(changes.removed)
|
|
3525
|
# store all local modifications, useful later for rename detection
|
|
3524
|
# store all local modifications, useful later for rename detection
|
|
3526
|
localchanges = dsmodified | dsadded
|
|
3525
|
localchanges = dsmodified | dsadded
|
|
3527
|
|
|
3526
|
|
|
3528
|
# only take into account for removes between wc and target
|
|
3527
|
# only take into account for removes between wc and target
|
|
3529
|
clean |= dsremoved - removed
|
|
3528
|
clean |= dsremoved - removed
|
|
3530
|
dsremoved &= removed
|
|
3529
|
dsremoved &= removed
|
|
3531
|
# distinct between dirstate remove and other
|
|
3530
|
# distinct between dirstate remove and other
|
|
3532
|
removed -= dsremoved
|
|
3531
|
removed -= dsremoved
|
|
3533
|
|
|
3532
|
|
|
3534
|
modadded = added & dsmodified
|
|
3533
|
modadded = added & dsmodified
|
|
3535
|
added -= modadded
|
|
3534
|
added -= modadded
|
|
3536
|
|
|
3535
|
|
|
3537
|
# tell newly modified apart.
|
|
3536
|
# tell newly modified apart.
|
|
3538
|
dsmodified &= modified
|
|
3537
|
dsmodified &= modified
|
|
3539
|
dsmodified |= modified & dsadded # dirstate added may need backup
|
|
3538
|
dsmodified |= modified & dsadded # dirstate added may need backup
|
|
3540
|
modified -= dsmodified
|
|
3539
|
modified -= dsmodified
|
|
3541
|
|
|
3540
|
|
|
3542
|
# We need to wait for some post-processing to update this set
|
|
3541
|
# We need to wait for some post-processing to update this set
|
|
3543
|
# before making the distinction. The dirstate will be used for
|
|
3542
|
# before making the distinction. The dirstate will be used for
|
|
3544
|
# that purpose.
|
|
3543
|
# that purpose.
|
|
3545
|
dsadded = added
|
|
3544
|
dsadded = added
|
|
3546
|
|
|
3545
|
|
|
3547
|
# in case of merge, files that are actually added can be reported as
|
|
3546
|
# in case of merge, files that are actually added can be reported as
|
|
3548
|
# modified, we need to post process the result
|
|
3547
|
# modified, we need to post process the result
|
|
3549
|
if p2 != nullid:
|
|
3548
|
if p2 != nullid:
|
|
3550
|
mergeadd = set(dsmodified)
|
|
3549
|
mergeadd = set(dsmodified)
|
|
3551
|
for path in dsmodified:
|
|
3550
|
for path in dsmodified:
|
|
3552
|
if path in mf:
|
|
3551
|
if path in mf:
|
|
3553
|
mergeadd.remove(path)
|
|
3552
|
mergeadd.remove(path)
|
|
3554
|
dsadded |= mergeadd
|
|
3553
|
dsadded |= mergeadd
|
|
3555
|
dsmodified -= mergeadd
|
|
3554
|
dsmodified -= mergeadd
|
|
3556
|
|
|
3555
|
|
|
3557
|
# if f is a rename, update `names` to also revert the source
|
|
3556
|
# if f is a rename, update `names` to also revert the source
|
|
3558
|
cwd = repo.getcwd()
|
|
3557
|
cwd = repo.getcwd()
|
|
3559
|
for f in localchanges:
|
|
3558
|
for f in localchanges:
|
|
3560
|
src = repo.dirstate.copied(f)
|
|
3559
|
src = repo.dirstate.copied(f)
|
|
3561
|
# XXX should we check for rename down to target node?
|
|
3560
|
# XXX should we check for rename down to target node?
|
|
3562
|
if src and src not in names and repo.dirstate[src] == 'r':
|
|
3561
|
if src and src not in names and repo.dirstate[src] == 'r':
|
|
3563
|
dsremoved.add(src)
|
|
3562
|
dsremoved.add(src)
|
|
3564
|
names[src] = (repo.pathto(src, cwd), True)
|
|
3563
|
names[src] = (repo.pathto(src, cwd), True)
|
|
3565
|
|
|
3564
|
|
|
3566
|
# determine the exact nature of the deleted changesets
|
|
3565
|
# determine the exact nature of the deleted changesets
|
|
3567
|
deladded = set(_deleted)
|
|
3566
|
deladded = set(_deleted)
|
|
3568
|
for path in _deleted:
|
|
3567
|
for path in _deleted:
|
|
3569
|
if path in mf:
|
|
3568
|
if path in mf:
|
|
3570
|
deladded.remove(path)
|
|
3569
|
deladded.remove(path)
|
|
3571
|
deleted = _deleted - deladded
|
|
3570
|
deleted = _deleted - deladded
|
|
3572
|
|
|
3571
|
|
|
3573
|
# distinguish between file to forget and the other
|
|
3572
|
# distinguish between file to forget and the other
|
|
3574
|
added = set()
|
|
3573
|
added = set()
|
|
3575
|
for abs in dsadded:
|
|
3574
|
for abs in dsadded:
|
|
3576
|
if repo.dirstate[abs] != 'a':
|
|
3575
|
if repo.dirstate[abs] != 'a':
|
|
3577
|
added.add(abs)
|
|
3576
|
added.add(abs)
|
|
3578
|
dsadded -= added
|
|
3577
|
dsadded -= added
|
|
3579
|
|
|
3578
|
|
|
3580
|
for abs in deladded:
|
|
3579
|
for abs in deladded:
|
|
3581
|
if repo.dirstate[abs] == 'a':
|
|
3580
|
if repo.dirstate[abs] == 'a':
|
|
3582
|
dsadded.add(abs)
|
|
3581
|
dsadded.add(abs)
|
|
3583
|
deladded -= dsadded
|
|
3582
|
deladded -= dsadded
|
|
3584
|
|
|
3583
|
|
|
3585
|
# For files marked as removed, we check if an unknown file is present at
|
|
3584
|
# For files marked as removed, we check if an unknown file is present at
|
|
3586
|
# the same path. If a such file exists it may need to be backed up.
|
|
3585
|
# the same path. If a such file exists it may need to be backed up.
|
|
3587
|
# Making the distinction at this stage helps have simpler backup
|
|
3586
|
# Making the distinction at this stage helps have simpler backup
|
|
3588
|
# logic.
|
|
3587
|
# logic.
|
|
3589
|
removunk = set()
|
|
3588
|
removunk = set()
|
|
3590
|
for abs in removed:
|
|
3589
|
for abs in removed:
|
|
3591
|
target = repo.wjoin(abs)
|
|
3590
|
target = repo.wjoin(abs)
|
|
3592
|
if os.path.lexists(target):
|
|
3591
|
if os.path.lexists(target):
|
|
3593
|
removunk.add(abs)
|
|
3592
|
removunk.add(abs)
|
|
3594
|
removed -= removunk
|
|
3593
|
removed -= removunk
|
|
3595
|
|
|
3594
|
|
|
3596
|
dsremovunk = set()
|
|
3595
|
dsremovunk = set()
|
|
3597
|
for abs in dsremoved:
|
|
3596
|
for abs in dsremoved:
|
|
3598
|
target = repo.wjoin(abs)
|
|
3597
|
target = repo.wjoin(abs)
|
|
3599
|
if os.path.lexists(target):
|
|
3598
|
if os.path.lexists(target):
|
|
3600
|
dsremovunk.add(abs)
|
|
3599
|
dsremovunk.add(abs)
|
|
3601
|
dsremoved -= dsremovunk
|
|
3600
|
dsremoved -= dsremovunk
|
|
3602
|
|
|
3601
|
|
|
3603
|
# action to be actually performed by revert
|
|
3602
|
# action to be actually performed by revert
|
|
3604
|
# (<list of file>, message>) tuple
|
|
3603
|
# (<list of file>, message>) tuple
|
|
3605
|
actions = {'revert': ([], _('reverting %s\n')),
|
|
3604
|
actions = {'revert': ([], _('reverting %s\n')),
|
|
3606
|
'add': ([], _('adding %s\n')),
|
|
3605
|
'add': ([], _('adding %s\n')),
|
|
3607
|
'remove': ([], _('removing %s\n')),
|
|
3606
|
'remove': ([], _('removing %s\n')),
|
|
3608
|
'drop': ([], _('removing %s\n')),
|
|
3607
|
'drop': ([], _('removing %s\n')),
|
|
3609
|
'forget': ([], _('forgetting %s\n')),
|
|
3608
|
'forget': ([], _('forgetting %s\n')),
|
|
3610
|
'undelete': ([], _('undeleting %s\n')),
|
|
3609
|
'undelete': ([], _('undeleting %s\n')),
|
|
3611
|
'noop': (None, _('no changes needed to %s\n')),
|
|
3610
|
'noop': (None, _('no changes needed to %s\n')),
|
|
3612
|
'unknown': (None, _('file not managed: %s\n')),
|
|
3611
|
'unknown': (None, _('file not managed: %s\n')),
|
|
3613
|
}
|
|
3612
|
}
|
|
3614
|
|
|
3613
|
|
|
3615
|
# "constant" that convey the backup strategy.
|
|
3614
|
# "constant" that convey the backup strategy.
|
|
3616
|
# All set to `discard` if `no-backup` is set do avoid checking
|
|
3615
|
# All set to `discard` if `no-backup` is set do avoid checking
|
|
3617
|
# no_backup lower in the code.
|
|
3616
|
# no_backup lower in the code.
|
|
3618
|
# These values are ordered for comparison purposes
|
|
3617
|
# These values are ordered for comparison purposes
|
|
3619
|
backupinteractive = 3 # do backup if interactively modified
|
|
3618
|
backupinteractive = 3 # do backup if interactively modified
|
|
3620
|
backup = 2 # unconditionally do backup
|
|
3619
|
backup = 2 # unconditionally do backup
|
|
3621
|
check = 1 # check if the existing file differs from target
|
|
3620
|
check = 1 # check if the existing file differs from target
|
|
3622
|
discard = 0 # never do backup
|
|
3621
|
discard = 0 # never do backup
|
|
3623
|
if opts.get('no_backup'):
|
|
3622
|
if opts.get('no_backup'):
|
|
3624
|
backupinteractive = backup = check = discard
|
|
3623
|
backupinteractive = backup = check = discard
|
|
3625
|
if interactive:
|
|
3624
|
if interactive:
|
|
3626
|
dsmodifiedbackup = backupinteractive
|
|
3625
|
dsmodifiedbackup = backupinteractive
|
|
3627
|
else:
|
|
3626
|
else:
|
|
3628
|
dsmodifiedbackup = backup
|
|
3627
|
dsmodifiedbackup = backup
|
|
3629
|
tobackup = set()
|
|
3628
|
tobackup = set()
|
|
3630
|
|
|
3629
|
|
|
3631
|
backupanddel = actions['remove']
|
|
3630
|
backupanddel = actions['remove']
|
|
3632
|
if not opts.get('no_backup'):
|
|
3631
|
if not opts.get('no_backup'):
|
|
3633
|
backupanddel = actions['drop']
|
|
3632
|
backupanddel = actions['drop']
|
|
3634
|
|
|
3633
|
|
|
3635
|
disptable = (
|
|
3634
|
disptable = (
|
|
3636
|
# dispatch table:
|
|
3635
|
# dispatch table:
|
|
3637
|
# file state
|
|
3636
|
# file state
|
|
3638
|
# action
|
|
3637
|
# action
|
|
3639
|
# make backup
|
|
3638
|
# make backup
|
|
3640
|
|
|
3639
|
|
|
3641
|
## Sets that results that will change file on disk
|
|
3640
|
## Sets that results that will change file on disk
|
|
3642
|
# Modified compared to target, no local change
|
|
3641
|
# Modified compared to target, no local change
|
|
3643
|
(modified, actions['revert'], discard),
|
|
3642
|
(modified, actions['revert'], discard),
|
|
3644
|
# Modified compared to target, but local file is deleted
|
|
3643
|
# Modified compared to target, but local file is deleted
|
|
3645
|
(deleted, actions['revert'], discard),
|
|
3644
|
(deleted, actions['revert'], discard),
|
|
3646
|
# Modified compared to target, local change
|
|
3645
|
# Modified compared to target, local change
|
|
3647
|
(dsmodified, actions['revert'], dsmodifiedbackup),
|
|
3646
|
(dsmodified, actions['revert'], dsmodifiedbackup),
|
|
3648
|
# Added since target
|
|
3647
|
# Added since target
|
|
3649
|
(added, actions['remove'], discard),
|
|
3648
|
(added, actions['remove'], discard),
|
|
3650
|
# Added in working directory
|
|
3649
|
# Added in working directory
|
|
3651
|
(dsadded, actions['forget'], discard),
|
|
3650
|
(dsadded, actions['forget'], discard),
|
|
3652
|
# Added since target, have local modification
|
|
3651
|
# Added since target, have local modification
|
|
3653
|
(modadded, backupanddel, backup),
|
|
3652
|
(modadded, backupanddel, backup),
|
|
3654
|
# Added since target but file is missing in working directory
|
|
3653
|
# Added since target but file is missing in working directory
|
|
3655
|
(deladded, actions['drop'], discard),
|
|
3654
|
(deladded, actions['drop'], discard),
|
|
3656
|
# Removed since target, before working copy parent
|
|
3655
|
# Removed since target, before working copy parent
|
|
3657
|
(removed, actions['add'], discard),
|
|
3656
|
(removed, actions['add'], discard),
|
|
3658
|
# Same as `removed` but an unknown file exists at the same path
|
|
3657
|
# Same as `removed` but an unknown file exists at the same path
|
|
3659
|
(removunk, actions['add'], check),
|
|
3658
|
(removunk, actions['add'], check),
|
|
3660
|
# Removed since target, marked as such in working copy parent
|
|
3659
|
# Removed since target, marked as such in working copy parent
|
|
3661
|
(dsremoved, actions['undelete'], discard),
|
|
3660
|
(dsremoved, actions['undelete'], discard),
|
|
3662
|
# Same as `dsremoved` but an unknown file exists at the same path
|
|
3661
|
# Same as `dsremoved` but an unknown file exists at the same path
|
|
3663
|
(dsremovunk, actions['undelete'], check),
|
|
3662
|
(dsremovunk, actions['undelete'], check),
|
|
3664
|
## the following sets do not result in any file changes
|
|
3663
|
## the following sets do not result in any file changes
|
|
3665
|
# File with no modification
|
|
3664
|
# File with no modification
|
|
3666
|
(clean, actions['noop'], discard),
|
|
3665
|
(clean, actions['noop'], discard),
|
|
3667
|
# Existing file, not tracked anywhere
|
|
3666
|
# Existing file, not tracked anywhere
|
|
3668
|
(unknown, actions['unknown'], discard),
|
|
3667
|
(unknown, actions['unknown'], discard),
|
|
3669
|
)
|
|
3668
|
)
|
|
3670
|
|
|
3669
|
|
|
3671
|
for abs, (rel, exact) in sorted(names.items()):
|
|
3670
|
for abs, (rel, exact) in sorted(names.items()):
|
|
3672
|
# target file to be touched on disk (relative to cwd)
|
|
3671
|
# target file to be touched on disk (relative to cwd)
|
|
3673
|
target = repo.wjoin(abs)
|
|
3672
|
target = repo.wjoin(abs)
|
|
3674
|
# search for the entry in the dispatch table.
|
|
3673
|
# search for the entry in the dispatch table.
|
|
3675
|
# if the file is in any of these sets, it was touched in the working
|
|
3674
|
# if the file is in any of these sets, it was touched in the working
|
|
3676
|
# directory parent and we are sure it needs to be reverted.
|
|
3675
|
# directory parent and we are sure it needs to be reverted.
|
|
3677
|
for table, (xlist, msg), dobackup in disptable:
|
|
3676
|
for table, (xlist, msg), dobackup in disptable:
|
|
3678
|
if abs not in table:
|
|
3677
|
if abs not in table:
|
|
3679
|
continue
|
|
3678
|
continue
|
|
3680
|
if xlist is not None:
|
|
3679
|
if xlist is not None:
|
|
3681
|
xlist.append(abs)
|
|
3680
|
xlist.append(abs)
|
|
3682
|
if dobackup:
|
|
3681
|
if dobackup:
|
|
3683
|
# If in interactive mode, don't automatically create
|
|
3682
|
# If in interactive mode, don't automatically create
|
|
3684
|
# .orig files (issue4793)
|
|
3683
|
# .orig files (issue4793)
|
|
3685
|
if dobackup == backupinteractive:
|
|
3684
|
if dobackup == backupinteractive:
|
|
3686
|
tobackup.add(abs)
|
|
3685
|
tobackup.add(abs)
|
|
3687
|
elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
|
|
3686
|
elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
|
|
3688
|
bakname = scmutil.origpath(ui, repo, rel)
|
|
3687
|
bakname = scmutil.origpath(ui, repo, rel)
|
|
3689
|
ui.note(_('saving current version of %s as %s\n') %
|
|
3688
|
ui.note(_('saving current version of %s as %s\n') %
|
|
3690
|
(rel, bakname))
|
|
3689
|
(rel, bakname))
|
|
3691
|
if not opts.get('dry_run'):
|
|
3690
|
if not opts.get('dry_run'):
|
|
3692
|
if interactive:
|
|
3691
|
if interactive:
|
|
3693
|
util.copyfile(target, bakname)
|
|
3692
|
util.copyfile(target, bakname)
|
|
3694
|
else:
|
|
3693
|
else:
|
|
3695
|
util.rename(target, bakname)
|
|
3694
|
util.rename(target, bakname)
|
|
3696
|
if ui.verbose or not exact:
|
|
3695
|
if ui.verbose or not exact:
|
|
3697
|
if not isinstance(msg, bytes):
|
|
3696
|
if not isinstance(msg, bytes):
|
|
3698
|
msg = msg(abs)
|
|
3697
|
msg = msg(abs)
|
|
3699
|
ui.status(msg % rel)
|
|
3698
|
ui.status(msg % rel)
|
|
3700
|
elif exact:
|
|
3699
|
elif exact:
|
|
3701
|
ui.warn(msg % rel)
|
|
3700
|
ui.warn(msg % rel)
|
|
3702
|
break
|
|
3701
|
break
|
|
3703
|
|
|
3702
|
|
|
3704
|
if not opts.get('dry_run'):
|
|
3703
|
if not opts.get('dry_run'):
|
|
3705
|
needdata = ('revert', 'add', 'undelete')
|
|
3704
|
needdata = ('revert', 'add', 'undelete')
|
|
3706
|
_revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
|
|
3705
|
_revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
|
|
3707
|
_performrevert(repo, parents, ctx, actions, interactive, tobackup)
|
|
3706
|
_performrevert(repo, parents, ctx, actions, interactive, tobackup)
|
|
3708
|
|
|
3707
|
|
|
3709
|
if targetsubs:
|
|
3708
|
if targetsubs:
|
|
3710
|
# Revert the subrepos on the revert list
|
|
3709
|
# Revert the subrepos on the revert list
|
|
3711
|
for sub in targetsubs:
|
|
3710
|
for sub in targetsubs:
|
|
3712
|
try:
|
|
3711
|
try:
|
|
3713
|
wctx.sub(sub).revert(ctx.substate[sub], *pats,
|
|
3712
|
wctx.sub(sub).revert(ctx.substate[sub], *pats,
|
|
3714
|
**pycompat.strkwargs(opts))
|
|
3713
|
**pycompat.strkwargs(opts))
|
|
3715
|
except KeyError:
|
|
3714
|
except KeyError:
|
|
3716
|
raise error.Abort("subrepository '%s' does not exist in %s!"
|
|
3715
|
raise error.Abort("subrepository '%s' does not exist in %s!"
|
|
3717
|
% (sub, short(ctx.node())))
|
|
3716
|
% (sub, short(ctx.node())))
|
|
3718
|
|
|
3717
|
|
|
3719
|
def _revertprefetch(repo, ctx, *files):
|
|
3718
|
def _revertprefetch(repo, ctx, *files):
|
|
3720
|
"""Let extension changing the storage layer prefetch content"""
|
|
3719
|
"""Let extension changing the storage layer prefetch content"""
|
|
3721
|
|
|
3720
|
|
|
3722
|
def _performrevert(repo, parents, ctx, actions, interactive=False,
|
|
3721
|
def _performrevert(repo, parents, ctx, actions, interactive=False,
|
|
3723
|
tobackup=None):
|
|
3722
|
tobackup=None):
|
|
3724
|
"""function that actually perform all the actions computed for revert
|
|
3723
|
"""function that actually perform all the actions computed for revert
|
|
3725
|
|
|
3724
|
|
|
3726
|
This is an independent function to let extensions plug in and react to
|
|
3725
|
This is an independent function to let extensions plug in and react to
|
|
3727
|
the imminent revert.
|
|
3726
|
the imminent revert.
|
|
3728
|
|
|
3727
|
|
|
3729
|
Make sure you have the working directory locked when calling this function.
|
|
3728
|
Make sure you have the working directory locked when calling this function.
|
|
3730
|
"""
|
|
3729
|
"""
|
|
3731
|
parent, p2 = parents
|
|
3730
|
parent, p2 = parents
|
|
3732
|
node = ctx.node()
|
|
3731
|
node = ctx.node()
|
|
3733
|
excluded_files = []
|
|
3732
|
excluded_files = []
|
|
3734
|
matcher_opts = {"exclude": excluded_files}
|
|
3733
|
matcher_opts = {"exclude": excluded_files}
|
|
3735
|
|
|
3734
|
|
|
3736
|
def checkout(f):
|
|
3735
|
def checkout(f):
|
|
3737
|
fc = ctx[f]
|
|
3736
|
fc = ctx[f]
|
|
3738
|
repo.wwrite(f, fc.data(), fc.flags())
|
|
3737
|
repo.wwrite(f, fc.data(), fc.flags())
|
|
3739
|
|
|
3738
|
|
|
3740
|
def doremove(f):
|
|
3739
|
def doremove(f):
|
|
3741
|
try:
|
|
3740
|
try:
|
|
3742
|
repo.wvfs.unlinkpath(f)
|
|
3741
|
repo.wvfs.unlinkpath(f)
|
|
3743
|
except OSError:
|
|
3742
|
except OSError:
|
|
3744
|
pass
|
|
3743
|
pass
|
|
3745
|
repo.dirstate.remove(f)
|
|
3744
|
repo.dirstate.remove(f)
|
|
3746
|
|
|
3745
|
|
|
3747
|
audit_path = pathutil.pathauditor(repo.root, cached=True)
|
|
3746
|
audit_path = pathutil.pathauditor(repo.root, cached=True)
|
|
3748
|
for f in actions['forget'][0]:
|
|
3747
|
for f in actions['forget'][0]:
|
|
3749
|
if interactive:
|
|
3748
|
if interactive:
|
|
3750
|
choice = repo.ui.promptchoice(
|
|
3749
|
choice = repo.ui.promptchoice(
|
|
3751
|
_("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
|
|
3750
|
_("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
|
|
3752
|
if choice == 0:
|
|
3751
|
if choice == 0:
|
|
3753
|
repo.dirstate.drop(f)
|
|
3752
|
repo.dirstate.drop(f)
|
|
3754
|
else:
|
|
3753
|
else:
|
|
3755
|
excluded_files.append(repo.wjoin(f))
|
|
3754
|
excluded_files.append(repo.wjoin(f))
|
|
3756
|
else:
|
|
3755
|
else:
|
|
3757
|
repo.dirstate.drop(f)
|
|
3756
|
repo.dirstate.drop(f)
|
|
3758
|
for f in actions['remove'][0]:
|
|
3757
|
for f in actions['remove'][0]:
|
|
3759
|
audit_path(f)
|
|
3758
|
audit_path(f)
|
|
3760
|
if interactive:
|
|
3759
|
if interactive:
|
|
3761
|
choice = repo.ui.promptchoice(
|
|
3760
|
choice = repo.ui.promptchoice(
|
|
3762
|
_("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
|
|
3761
|
_("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
|
|
3763
|
if choice == 0:
|
|
3762
|
if choice == 0:
|
|
3764
|
doremove(f)
|
|
3763
|
doremove(f)
|
|
3765
|
else:
|
|
3764
|
else:
|
|
3766
|
excluded_files.append(repo.wjoin(f))
|
|
3765
|
excluded_files.append(repo.wjoin(f))
|
|
3767
|
else:
|
|
3766
|
else:
|
|
3768
|
doremove(f)
|
|
3767
|
doremove(f)
|
|
3769
|
for f in actions['drop'][0]:
|
|
3768
|
for f in actions['drop'][0]:
|
|
3770
|
audit_path(f)
|
|
3769
|
audit_path(f)
|
|
3771
|
repo.dirstate.remove(f)
|
|
3770
|
repo.dirstate.remove(f)
|
|
3772
|
|
|
3771
|
|
|
3773
|
normal = None
|
|
3772
|
normal = None
|
|
3774
|
if node == parent:
|
|
3773
|
if node == parent:
|
|
3775
|
# We're reverting to our parent. If possible, we'd like status
|
|
3774
|
# We're reverting to our parent. If possible, we'd like status
|
|
3776
|
# to report the file as clean. We have to use normallookup for
|
|
3775
|
# to report the file as clean. We have to use normallookup for
|
|
3777
|
# merges to avoid losing information about merged/dirty files.
|
|
3776
|
# merges to avoid losing information about merged/dirty files.
|
|
3778
|
if p2 != nullid:
|
|
3777
|
if p2 != nullid:
|
|
3779
|
normal = repo.dirstate.normallookup
|
|
3778
|
normal = repo.dirstate.normallookup
|
|
3780
|
else:
|
|
3779
|
else:
|
|
3781
|
normal = repo.dirstate.normal
|
|
3780
|
normal = repo.dirstate.normal
|
|
3782
|
|
|
3781
|
|
|
3783
|
newlyaddedandmodifiedfiles = set()
|
|
3782
|
newlyaddedandmodifiedfiles = set()
|
|
3784
|
if interactive:
|
|
3783
|
if interactive:
|
|
3785
|
# Prompt the user for changes to revert
|
|
3784
|
# Prompt the user for changes to revert
|
|
3786
|
torevert = [repo.wjoin(f) for f in actions['revert'][0]]
|
|
3785
|
torevert = [repo.wjoin(f) for f in actions['revert'][0]]
|
|
3787
|
m = scmutil.match(ctx, torevert, matcher_opts)
|
|
3786
|
m = scmutil.match(ctx, torevert, matcher_opts)
|
|
3788
|
diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
|
|
3787
|
diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
|
|
3789
|
diffopts.nodates = True
|
|
3788
|
diffopts.nodates = True
|
|
3790
|
diffopts.git = True
|
|
3789
|
diffopts.git = True
|
|
3791
|
operation = 'discard'
|
|
3790
|
operation = 'discard'
|
|
3792
|
reversehunks = True
|
|
3791
|
reversehunks = True
|
|
3793
|
if node != parent:
|
|
3792
|
if node != parent:
|
|
3794
|
operation = 'apply'
|
|
3793
|
operation = 'apply'
|
|
3795
|
reversehunks = False
|
|
3794
|
reversehunks = False
|
|
3796
|
if reversehunks:
|
|
3795
|
if reversehunks:
|
|
3797
|
diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
|
|
3796
|
diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
|
|
3798
|
else:
|
|
3797
|
else:
|
|
3799
|
diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
|
|
3798
|
diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
|
|
3800
|
originalchunks = patch.parsepatch(diff)
|
|
3799
|
originalchunks = patch.parsepatch(diff)
|
|
3801
|
|
|
3800
|
|
|
3802
|
try:
|
|
3801
|
try:
|
|
3803
|
|
|
3802
|
|
|
3804
|
chunks, opts = recordfilter(repo.ui, originalchunks,
|
|
3803
|
chunks, opts = recordfilter(repo.ui, originalchunks,
|
|
3805
|
operation=operation)
|
|
3804
|
operation=operation)
|
|
3806
|
if reversehunks:
|
|
3805
|
if reversehunks:
|
|
3807
|
chunks = patch.reversehunks(chunks)
|
|
3806
|
chunks = patch.reversehunks(chunks)
|
|
3808
|
|
|
3807
|
|
|
3809
|
except error.PatchError as err:
|
|
3808
|
except error.PatchError as err:
|
|
3810
|
raise error.Abort(_('error parsing patch: %s') % err)
|
|
3809
|
raise error.Abort(_('error parsing patch: %s') % err)
|
|
3811
|
|
|
3810
|
|
|
3812
|
newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
|
|
3811
|
newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
|
|
3813
|
if tobackup is None:
|
|
3812
|
if tobackup is None:
|
|
3814
|
tobackup = set()
|
|
3813
|
tobackup = set()
|
|
3815
|
# Apply changes
|
|
3814
|
# Apply changes
|
|
3816
|
fp = stringio()
|
|
3815
|
fp = stringio()
|
|
3817
|
for c in chunks:
|
|
3816
|
for c in chunks:
|
|
3818
|
# Create a backup file only if this hunk should be backed up
|
|
3817
|
# Create a backup file only if this hunk should be backed up
|
|
3819
|
if ishunk(c) and c.header.filename() in tobackup:
|
|
3818
|
if ishunk(c) and c.header.filename() in tobackup:
|
|
3820
|
abs = c.header.filename()
|
|
3819
|
abs = c.header.filename()
|
|
3821
|
target = repo.wjoin(abs)
|
|
3820
|
target = repo.wjoin(abs)
|
|
3822
|
bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
|
|
3821
|
bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
|
|
3823
|
util.copyfile(target, bakname)
|
|
3822
|
util.copyfile(target, bakname)
|
|
3824
|
tobackup.remove(abs)
|
|
3823
|
tobackup.remove(abs)
|
|
3825
|
c.write(fp)
|
|
3824
|
c.write(fp)
|
|
3826
|
dopatch = fp.tell()
|
|
3825
|
dopatch = fp.tell()
|
|
3827
|
fp.seek(0)
|
|
3826
|
fp.seek(0)
|
|
3828
|
if dopatch:
|
|
3827
|
if dopatch:
|
|
3829
|
try:
|
|
3828
|
try:
|
|
3830
|
patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
|
|
3829
|
patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
|
|
3831
|
except error.PatchError as err:
|
|
3830
|
except error.PatchError as err:
|
|
3832
|
raise error.Abort(str(err))
|
|
3831
|
raise error.Abort(str(err))
|
|
3833
|
del fp
|
|
3832
|
del fp
|
|
3834
|
else:
|
|
3833
|
else:
|
|
3835
|
for f in actions['revert'][0]:
|
|
3834
|
for f in actions['revert'][0]:
|
|
3836
|
checkout(f)
|
|
3835
|
checkout(f)
|
|
3837
|
if normal:
|
|
3836
|
if normal:
|
|
3838
|
normal(f)
|
|
3837
|
normal(f)
|
|
3839
|
|
|
3838
|
|
|
3840
|
for f in actions['add'][0]:
|
|
3839
|
for f in actions['add'][0]:
|
|
3841
|
# Don't check out modified files; they are already created by the diff
|
|
3840
|
# Don't check out modified files; they are already created by the diff
|
|
3842
|
if f not in newlyaddedandmodifiedfiles:
|
|
3841
|
if f not in newlyaddedandmodifiedfiles:
|
|
3843
|
checkout(f)
|
|
3842
|
checkout(f)
|
|
3844
|
repo.dirstate.add(f)
|
|
3843
|
repo.dirstate.add(f)
|
|
3845
|
|
|
3844
|
|
|
3846
|
normal = repo.dirstate.normallookup
|
|
3845
|
normal = repo.dirstate.normallookup
|
|
3847
|
if node == parent and p2 == nullid:
|
|
3846
|
if node == parent and p2 == nullid:
|
|
3848
|
normal = repo.dirstate.normal
|
|
3847
|
normal = repo.dirstate.normal
|
|
3849
|
for f in actions['undelete'][0]:
|
|
3848
|
for f in actions['undelete'][0]:
|
|
3850
|
checkout(f)
|
|
3849
|
checkout(f)
|
|
3851
|
normal(f)
|
|
3850
|
normal(f)
|
|
3852
|
|
|
3851
|
|
|
3853
|
copied = copies.pathcopies(repo[parent], ctx)
|
|
3852
|
copied = copies.pathcopies(repo[parent], ctx)
|
|
3854
|
|
|
3853
|
|
|
3855
|
for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
|
|
3854
|
for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
|
|
3856
|
if f in copied:
|
|
3855
|
if f in copied:
|
|
3857
|
repo.dirstate.copy(copied[f], f)
|
|
3856
|
repo.dirstate.copy(copied[f], f)
|
|
3858
|
|
|
3857
|
|
|
3859
|
class command(registrar.command):
|
|
3858
|
class command(registrar.command):
|
|
3860
|
"""deprecated: used registrar.command instead"""
|
|
3859
|
"""deprecated: used registrar.command instead"""
|
|
3861
|
def _doregister(self, func, name, *args, **kwargs):
|
|
3860
|
def _doregister(self, func, name, *args, **kwargs):
|
|
3862
|
func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
|
|
3861
|
func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
|
|
3863
|
return super(command, self)._doregister(func, name, *args, **kwargs)
|
|
3862
|
return super(command, self)._doregister(func, name, *args, **kwargs)
|
|
3864
|
|
|
3863
|
|
|
3865
|
# a list of (ui, repo, otherpeer, opts, missing) functions called by
|
|
3864
|
# a list of (ui, repo, otherpeer, opts, missing) functions called by
|
|
3866
|
# commands.outgoing. "missing" is "missing" of the result of
|
|
3865
|
# commands.outgoing. "missing" is "missing" of the result of
|
|
3867
|
# "findcommonoutgoing()"
|
|
3866
|
# "findcommonoutgoing()"
|
|
3868
|
outgoinghooks = util.hooks()
|
|
3867
|
outgoinghooks = util.hooks()
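For illustration, a sketch of how an extension could register such a function; the extension and function names are assumptions, while hooks.add() and the (ui, repo, otherpeer, opts, missing) signature come from the comment above.

from mercurial import cmdutil

def _reportoutgoing(ui, repo, otherpeer, opts, missing):
    # 'missing' is the list of changeset nodes that would be pushed
    ui.note('myextension: %d outgoing changesets\n' % len(missing))

def extsetup(ui):
    cmdutil.outgoinghooks.add('myextension', _reportoutgoing)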
|
|
3869
|
|
|
3868
|
|
|
3870
|
# a list of (ui, repo) functions called by commands.summary
|
|
3869
|
# a list of (ui, repo) functions called by commands.summary
|
|
3871
|
summaryhooks = util.hooks()
|
|
3870
|
summaryhooks = util.hooks()
|
|
3872
|
|
|
3871
|
|
|
3873
|
# a list of (ui, repo, opts, changes) functions called by commands.summary.
|
|
3872
|
# a list of (ui, repo, opts, changes) functions called by commands.summary.
|
|
3874
|
#
|
|
3873
|
#
|
|
3875
|
# functions should return a tuple of booleans below, if 'changes' is None:
|
|
3874
|
# functions should return a tuple of booleans below, if 'changes' is None:
|
|
3876
|
# (whether-incomings-are-needed, whether-outgoings-are-needed)
|
|
3875
|
# (whether-incomings-are-needed, whether-outgoings-are-needed)
|
|
3877
|
#
|
|
3876
|
#
|
|
3878
|
# otherwise, 'changes' is a tuple of tuples below:
|
|
3877
|
# otherwise, 'changes' is a tuple of tuples below:
|
|
3879
|
# - (sourceurl, sourcebranch, sourcepeer, incoming)
|
|
3878
|
# - (sourceurl, sourcebranch, sourcepeer, incoming)
|
|
3880
|
# - (desturl, destbranch, destpeer, outgoing)
|
|
3879
|
# - (desturl, destbranch, destpeer, outgoing)
|
|
3881
|
summaryremotehooks = util.hooks()
|
|
3880
|
summaryremotehooks = util.hooks()
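A sketch of a callback honoring the two-pass contract described above; the extension name and the reported message are assumptions.

from mercurial import cmdutil

def _remotesummary(ui, repo, opts, changes):
    if changes is None:
        # first pass: (need-incomings, need-outgoings)
        return (False, True)
    # second pass: changes is ((sourceurl, sourcebranch, sourcepeer, incoming),
    #                          (desturl, destbranch, destpeer, outgoing))
    desturl = changes[1][0]
    ui.note('myextension: outgoing summary computed for %s\n' % desturl)

def extsetup(ui):
    cmdutil.summaryremotehooks.add('myextension', _remotesummary)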
|
|
3882
|
|
|
3881
|
|
|
3883
|
# A list of state files kept by multistep operations like graft.
|
|
3882
|
# A list of state files kept by multistep operations like graft.
|
|
3884
|
# Since graft cannot be aborted, it is considered 'clearable' by update.
|
|
3883
|
# Since graft cannot be aborted, it is considered 'clearable' by update.
|
|
3885
|
# note: bisect is intentionally excluded
|
|
3884
|
# note: bisect is intentionally excluded
|
|
3886
|
# (state file, clearable, allowcommit, error, hint)
|
|
3885
|
# (state file, clearable, allowcommit, error, hint)
|
|
3887
|
unfinishedstates = [
|
|
3886
|
unfinishedstates = [
|
|
3888
|
('graftstate', True, False, _('graft in progress'),
|
|
3887
|
('graftstate', True, False, _('graft in progress'),
|
|
3889
|
_("use 'hg graft --continue' or 'hg update' to abort")),
|
|
3888
|
_("use 'hg graft --continue' or 'hg update' to abort")),
|
|
3890
|
('updatestate', True, False, _('last update was interrupted'),
|
|
3889
|
('updatestate', True, False, _('last update was interrupted'),
|
|
3891
|
_("use 'hg update' to get a consistent checkout"))
|
|
3890
|
_("use 'hg update' to get a consistent checkout"))
|
|
3892
|
]
|
|
3891
|
]
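Extensions that implement their own multistep operations append an entry in the same (state file, clearable, allowcommit, error, hint) format; a sketch with assumed names:

from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.unfinishedstates.append(
    ('mystate', False, False, _('my operation in progress'),
     _("use 'hg myoperation --continue' or 'hg myoperation --abort'")))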
|
|
3893
|
|
|
3892
|
|
|
3894
|
def checkunfinished(repo, commit=False):
|
|
3893
|
def checkunfinished(repo, commit=False):
|
|
3895
|
'''Look for an unfinished multistep operation, like graft, and abort
|
|
3894
|
'''Look for an unfinished multistep operation, like graft, and abort
|
|
3896
|
if found. It's probably good to check this right before
|
|
3895
|
if found. It's probably good to check this right before
|
|
3897
|
bailifchanged().
|
|
3896
|
bailifchanged().
|
|
3898
|
'''
|
|
3897
|
'''
|
|
3899
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3898
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3900
|
if commit and allowcommit:
|
|
3899
|
if commit and allowcommit:
|
|
3901
|
continue
|
|
3900
|
continue
|
|
3902
|
if repo.vfs.exists(f):
|
|
3901
|
if repo.vfs.exists(f):
|
|
3903
|
raise error.Abort(msg, hint=hint)
|
|
3902
|
raise error.Abort(msg, hint=hint)
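A sketch of the usual call pattern in a command implementation, per the docstring's advice to check right before bailifchanged(); the command itself is hypothetical.

from mercurial import cmdutil

def mycommand(ui, repo, *pats, **opts):
    # refuse to start while another multistep operation (e.g. graft) is
    # unfinished, then refuse to run on a locally modified working directory
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    ui.status('nothing unfinished, working directory clean\n')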
|
|
3904
|
|
|
3903
|
|
|
3905
|
def clearunfinished(repo):
|
|
3904
|
def clearunfinished(repo):
|
|
3906
|
'''Check for unfinished operations (as above), and clear the ones
|
|
3905
|
'''Check for unfinished operations (as above), and clear the ones
|
|
3907
|
that are clearable.
|
|
3906
|
that are clearable.
|
|
3908
|
'''
|
|
3907
|
'''
|
|
3909
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3908
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3910
|
if not clearable and repo.vfs.exists(f):
|
|
3909
|
if not clearable and repo.vfs.exists(f):
|
|
3911
|
raise error.Abort(msg, hint=hint)
|
|
3910
|
raise error.Abort(msg, hint=hint)
|
|
3912
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3911
|
for f, clearable, allowcommit, msg, hint in unfinishedstates:
|
|
3913
|
if clearable and repo.vfs.exists(f):
|
|
3912
|
if clearable and repo.vfs.exists(f):
|
|
3914
|
util.unlink(repo.vfs.join(f))
|
|
3913
|
util.unlink(repo.vfs.join(f))
|
|
3915
|
|
|
3914
|
|
|
3916
|
afterresolvedstates = [
|
|
3915
|
afterresolvedstates = [
|
|
3917
|
('graftstate',
|
|
3916
|
('graftstate',
|
|
3918
|
_('hg graft --continue')),
|
|
3917
|
_('hg graft --continue')),
|
|
3919
|
]
|
|
3918
|
]
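Extensions register the command that finishes their operation the same way, so checkafterresolved() and howtocontinue() below can suggest it; the names are assumed:

from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.afterresolvedstates.append(
    ('mystate', _('hg myoperation --continue')))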
|
|
3920
|
|
|
3919
|
|
|
3921
|
def howtocontinue(repo):
|
|
3920
|
def howtocontinue(repo):
|
|
3922
|
'''Check for an unfinished operation and return the command to finish
|
|
3921
|
'''Check for an unfinished operation and return the command to finish
|
|
3923
|
it.
|
|
3922
|
it.
|
|
3924
|
|
|
3923
|
|
|
3925
|
afterresolvedstates tuples define a .hg/{file} and the corresponding
|
|
3924
|
afterresolvedstates tuples define a .hg/{file} and the corresponding
|
|
3926
|
command needed to finish it.
|
|
3925
|
command needed to finish it.
|
|
3927
|
|
|
3926
|
|
|
3928
|
Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
|
|
3927
|
Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
|
|
3929
|
a boolean.
|
|
3928
|
a boolean.
|
|
3930
|
'''
|
|
3929
|
'''
|
|
3931
|
contmsg = _("continue: %s")
|
|
3930
|
contmsg = _("continue: %s")
|
|
3932
|
for f, msg in afterresolvedstates:
|
|
3931
|
for f, msg in afterresolvedstates:
|
|
3933
|
if repo.vfs.exists(f):
|
|
3932
|
if repo.vfs.exists(f):
|
|
3934
|
return contmsg % msg, True
|
|
3933
|
return contmsg % msg, True
|
|
3935
|
if repo[None].dirty(missing=True, merge=False, branch=False):
|
|
3934
|
if repo[None].dirty(missing=True, merge=False, branch=False):
|
|
3936
|
return contmsg % _("hg commit"), False
|
|
3935
|
return contmsg % _("hg commit"), False
|
|
3937
|
return None, None
|
|
3936
|
return None, None
|
|
3938
|
|
|
3937
|
|
|
3939
|
def checkafterresolved(repo):
|
|
3938
|
def checkafterresolved(repo):
|
|
3940
|
'''Inform the user about the next action after completing hg resolve
|
|
3939
|
'''Inform the user about the next action after completing hg resolve
|
|
3941
|
|
|
3940
|
|
|
3942
|
If there's a matching entry in afterresolvedstates, the message is
|
|
3941
|
If there's a matching entry in afterresolvedstates, the message is
|
|
3943
|
reported via repo.ui.warn.
|
|
3942
|
reported via repo.ui.warn.
|
|
3944
|
|
|
3943
|
|
|
3945
|
Otherwise, it is reported via repo.ui.note.
|
|
3944
|
Otherwise, it is reported via repo.ui.note.
|
|
3946
|
'''
|
|
3945
|
'''
|
|
3947
|
msg, warning = howtocontinue(repo)
|
|
3946
|
msg, warning = howtocontinue(repo)
|
|
3948
|
if msg is not None:
|
|
3947
|
if msg is not None:
|
|
3949
|
if warning:
|
|
3948
|
if warning:
|
|
3950
|
repo.ui.warn("%s\n" % msg)
|
|
3949
|
repo.ui.warn("%s\n" % msg)
|
|
3951
|
else:
|
|
3950
|
else:
|
|
3952
|
repo.ui.note("%s\n" % msg)
|
|
3951
|
repo.ui.note("%s\n" % msg)
|
|
3953
|
|
|
3952
|
|
|
3954
|
def wrongtooltocontinue(repo, task):
|
|
3953
|
def wrongtooltocontinue(repo, task):
|
|
3955
|
'''Raise an abort suggesting how to properly continue if there is an
|
|
3954
|
'''Raise an abort suggesting how to properly continue if there is an
|
|
3956
|
active task.
|
|
3955
|
active task.
|
|
3957
|
|
|
3956
|
|
|
3958
|
Uses howtocontinue() to find the active task.
|
|
3957
|
Uses howtocontinue() to find the active task.
|
|
3959
|
|
|
3958
|
|
|
3960
|
If there's no active task (only the generic 'hg commit' suggestion), it does not offer
|
|
3959
|
If there's no active task (only the generic 'hg commit' suggestion), it does not offer
|
|
3961
|
a hint.
|
|
3960
|
a hint.
|
|
3962
|
'''
|
|
3961
|
'''
|
|
3963
|
after = howtocontinue(repo)
|
|
3962
|
after = howtocontinue(repo)
|
|
3964
|
hint = None
|
|
3963
|
hint = None
|
|
3965
|
if after[1]:
|
|
3964
|
if after[1]:
|
|
3966
|
hint = after[0]
|
|
3965
|
hint = after[0]
|
|
3967
|
raise error.Abort(_('no %s in progress') % task, hint=hint)
|
|
3966
|
raise error.Abort(_('no %s in progress') % task, hint=hint)
|