branching: merge stable into default
marmoute - r52727:454fedda merge default
@@ -1,108 +1,108 @@
# highlight - syntax highlighting in hgweb, based on Pygments
#
# Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split into an interface and an implementation
# file to defer pygments loading and speed up extension setup.

"""syntax highlighting for hgweb (requires Pygments)

It depends on the Pygments syntax highlighting library:
http://pygments.org/

There are the following configuration options::

  [web]
  pygments_style = <style> (default: colorful)
  highlightfiles = <fileset> (default: size('<5M'))
  highlightonlymatchfilename = <bool> (default False)

``highlightonlymatchfilename`` will only highlight files if their type could
be identified by their filename. When this is not enabled (the default),
Pygments will try very hard to identify the file type from content and any
match (even matches with a low confidence score) will be used.
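
For example, a configuration along these lines enables the extension and
sets all three options (the style name and fileset shown are illustrative,
not defaults)::

  [extensions]
  highlight =

  [web]
  pygments_style = monokai
  highlightfiles = size('<2M') and not binary()
  highlightonlymatchfilename = True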
27 """
27 """
28
28
29
29
30 from . import highlight
30 from . import highlight
31 from mercurial.hgweb import (
31 from mercurial.hgweb import (
32 webcommands,
32 webcommands,
33 webutil,
33 webutil,
34 )
34 )
35
35
36 from mercurial import (
36 from mercurial import (
37 extensions,
37 extensions,
38 pycompat,
38 pycompat,
39 )
39 )
40
40
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
43 # be specifying the version(s) of Mercurial they are tested with, or
43 # be specifying the version(s) of Mercurial they are tested with, or
44 # leave the attribute unspecified.
44 # leave the attribute unspecified.
45 testedwith = b'ships-with-hg-core'
45 testedwith = b'ships-with-hg-core'
46
46
47
47
48 def pygmentize(web, field, fctx, tmpl):
48 def pygmentize(web, field, fctx, tmpl):
49 style = web.config(b'web', b'pygments_style', b'colorful')
49 style = web.config(b'web', b'pygments_style', b'colorful')
50 expr = web.config(b'web', b'highlightfiles', b"size('<5M')")
50 expr = web.config(b'web', b'highlightfiles', b"size('<5M')")
51 filenameonly = web.configbool(b'web', b'highlightonlymatchfilename', False)
51 filenameonly = web.configbool(b'web', b'highlightonlymatchfilename', False)
52
52
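    # Match the file's path against the configured 'highlightfiles'
    # fileset; only matching files (by default, those under 5M) are
    # pygmentized.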
    ctx = fctx.changectx()
    m = ctx.matchfileset(fctx.repo().root, expr)
    if m(fctx.path()):
        highlight.pygmentize(
            field, fctx, style, tmpl, guessfilenameonly=filenameonly
        )


def filerevision_highlight(orig, web, fctx):
    mt = web.res.headers[b'Content-Type']
    # only pygmentize for mimetype containing 'html' so we both match
    # 'text/html' and possibly 'application/xhtml+xml' in the future
    # so that we don't have to touch the extension when the mimetype
    # for a template changes; also hgweb optimizes the case that a
    # raw file is sent using rawfile() and doesn't call us, so we
    # can't clash with the file's content-type here in case we
    # pygmentize an html file
    if b'html' in mt:
        pygmentize(web, b'fileline', fctx, web.tmpl)

    return orig(web, fctx)


def annotate_highlight(orig, web):
    mt = web.res.headers[b'Content-Type']
    if b'html' in mt:
        fctx = webutil.filectx(web.repo, web.req)
        pygmentize(web, b'annotateline', fctx, web.tmpl)

    return orig(web)


def generate_css(web):
    pg_style = web.config(b'web', b'pygments_style', b'colorful')
    fmter = highlight.HtmlFormatter(style=pycompat.sysstr(pg_style))
    web.res.headers[b'Content-Type'] = b'text/css'
    style_defs = fmter.get_style_defs(pycompat.sysstr(b''))
    web.res.setbodybytes(
        b''.join(
            [
                b'/* pygments_style = %s */\n\n' % pg_style,
                pycompat.bytestr(style_defs),
            ]
        )
    )
    return web.res.sendresponse()


def extsetup(ui):
    # monkeypatch in the new version
    extensions.wrapfunction(
        webcommands, '_filerevision', filerevision_highlight
    )
    extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
    webcommands.highlightcss = generate_css
-    webcommands.__all__.append(b'highlightcss')
+    webcommands.__all__.append('highlightcss')
@@ -1,2685 +1,2685 @@
# histedit.py - interactive history editing for mercurial
#
# Copyright 2009 Augie Fackler <raf@durin42.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""interactive history editing

With this extension installed, Mercurial gains one new command: histedit. Usage
is as follows, assuming the following history::

 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
 |   Add delta
 |
 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
 |   Add gamma
 |
 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
 |   Add beta
 |
 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
     Add alpha

If you were to run ``hg histedit c561b4e977df``, you would see the following
file open in your editor::

 pick c561b4e977df Add beta
 pick 030b686bedc4 Add gamma
 pick 7c2fd3b9020c Add delta

 # Edit history between c561b4e977df and 7c2fd3b9020c
 #
 # Commits are listed from least to most recent
 #
 # Commands:
 #  p, pick = use commit
 #  e, edit = use commit, but allow edits before making new commit
 #  f, fold = use commit, but combine it with the one above
 #  r, roll = like fold, but discard this commit's description and date
 #  d, drop = remove commit from history
 #  m, mess = edit commit message without changing commit content
 #  b, base = checkout changeset and apply further changesets from there
 #

In this file, lines beginning with ``#`` are ignored. You must specify a rule
for each revision in your history. For example, if you had meant to add gamma
before beta, and then wanted to add delta in the same revision as beta, you
would reorganize the file to look like this::

 pick 030b686bedc4 Add gamma
 pick c561b4e977df Add beta
 fold 7c2fd3b9020c Add delta

 # Edit history between c561b4e977df and 7c2fd3b9020c
 #
 # Commits are listed from least to most recent
 #
 # Commands:
 #  p, pick = use commit
 #  e, edit = use commit, but allow edits before making new commit
 #  f, fold = use commit, but combine it with the one above
 #  r, roll = like fold, but discard this commit's description and date
 #  d, drop = remove commit from history
 #  m, mess = edit commit message without changing commit content
 #  b, base = checkout changeset and apply further changesets from there
 #

At which point you close the editor and ``histedit`` starts working. When you
specify a ``fold`` operation, ``histedit`` will open an editor when it folds
those revisions together, offering you a chance to clean up the commit message::

 Add beta
 ***
 Add delta

Edit the commit message to your liking, then close the editor. The date used
for the commit will be the later of the two commits' dates. For this example,
let's assume that the commit message was changed to ``Add beta and delta.``
After histedit has run and had a chance to remove any old or temporary
revisions it needed, the history looks like this::

 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
 |   Add beta and delta.
 |
 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
 |   Add gamma
 |
 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
     Add alpha

Note that ``histedit`` does *not* remove any revisions (even its own temporary
ones) until after it has completed all the editing operations, so it will
probably perform several strip operations when it's done. For the above example,
it had to run strip twice. Strip can be slow depending on a variety of factors,
so you might need to be a little patient. You can choose to keep the original
revisions by passing the ``--keep`` flag.

The ``edit`` operation will drop you back to a command prompt,
allowing you to edit files freely, or even use ``hg record`` to commit
some changes as a separate commit. When you're done, any remaining
uncommitted changes will be committed as well. When done, run ``hg
histedit --continue`` to finish this step. If there are uncommitted
changes, you'll be prompted for a new commit message, but the default
commit message will be the original message for the ``edit``-ed
revision, and the date of the original commit will be preserved.

The ``message`` operation will give you a chance to revise a commit
message without changing the contents. It's a shortcut for doing
``edit`` immediately followed by `hg histedit --continue``.

If ``histedit`` encounters a conflict when moving a revision (while
handling ``pick`` or ``fold``), it'll stop in a similar manner to
``edit`` with the difference that it won't prompt you for a commit
message when done. If you decide at this point that you don't like how
much work it will be to rearrange history, or that you made a mistake,
you can use ``hg histedit --abort`` to abandon the new changes you
have made and return to the state before you attempted to edit your
history.

If we clone the histedit-ed example repository above and add four more
changes, such that we have the following history::

 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
 |   Add theta
 |
 o 5 140988835471 2009-04-27 18:04 -0500 stefan
 |   Add eta
 |
 o 4 122930637314 2009-04-27 18:04 -0500 stefan
 |   Add zeta
 |
 o 3 836302820282 2009-04-27 18:04 -0500 stefan
 |   Add epsilon
 |
 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
 |   Add beta and delta.
 |
 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
 |   Add gamma
 |
 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
     Add alpha

If you run ``hg histedit --outgoing`` on the clone then it is the same
as running ``hg histedit 836302820282``. If you plan to push to a
repository that Mercurial does not detect to be related to the source
repo, you can add a ``--force`` option.
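
For example (the URL shown is illustrative)::

 hg histedit --outgoing https://example.com/fork --force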

Config
------

Histedit rule lines are truncated to 80 characters by default. You
can customize this behavior by setting a different length in your
configuration file::

 [histedit]
 linelen = 120  # truncate rule lines at 120 characters

The summary of a change can be customized as well::

 [histedit]
 summary-template = '{rev} {bookmarks} {desc|firstline}'

The customized summary should be kept short enough that rule lines
will fit in the configured line length. See above if that requires
customization.

``hg histedit`` attempts to automatically choose an appropriate base
revision to use. To change which base revision is used, define a
revset in your configuration file::

 [histedit]
 defaultrev = only(.) & draft()

By default, each edited revision needs to be present in the histedit
commands. To remove a revision you need to use the ``drop`` operation.
You can configure the drop to be implicit for missing commits by adding::

 [histedit]
 dropmissing = True

By default, histedit will close the transaction after each action. For
performance purposes, you can configure histedit to use a single transaction
across the entire histedit. WARNING: This setting introduces a significant risk
of losing the work you've done in a histedit if the histedit aborts
unexpectedly::

 [histedit]
 singletransaction = True

"""


# chistedit dependencies that are not available everywhere
try:
    import fcntl
    import termios
except ImportError:
    fcntl = None
    termios = None

import binascii
import functools
import os
import pickle
import struct

from mercurial.i18n import _
from mercurial.pycompat import (
    open,
)
from mercurial.node import (
    bin,
    hex,
    short,
)
from mercurial import (
    bundle2,
    cmdutil,
    context,
    copies,
    destutil,
    discovery,
    encoding,
    error,
    exchange,
    extensions,
    hg,
    logcmdutil,
    merge as mergemod,
    mergestate as mergestatemod,
    mergeutil,
    obsolete,
    pycompat,
    registrar,
    repair,
    rewriteutil,
    scmutil,
    state as statemod,
    util,
)
from mercurial.utils import (
    dateutil,
    stringutil,
    urlutil,
)

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)
configitem(
    b'experimental',
    b'histedit.autoverb',
    default=False,
)
configitem(
    b'histedit',
    b'defaultrev',
    default=None,
)
configitem(
    b'histedit',
    b'dropmissing',
    default=False,
)
configitem(
    b'histedit',
    b'linelen',
    default=80,
)
configitem(
    b'histedit',
    b'singletransaction',
    default=False,
)
configitem(
    b'ui',
    b'interface.histedit',
    default=None,
)
configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
# TODO: Teach the text-based histedit interface to respect this config option
# before we make it non-experimental.
configitem(
    b'histedit', b'later-commits-first', default=False, experimental=True
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

actiontable = {}
primaryactions = set()
secondaryactions = set()
tertiaryactions = set()
internalactions = set()


def geteditcomment(ui, first, last):
    """construct the editor comment
    The comment includes::
     - an intro
     - sorted primary commands
     - sorted short commands
     - sorted long commands
     - additional hints

    Commands are only included once.
    """
    intro = _(
        b"""Edit history between %s and %s

Commits are listed from least to most recent

You can reorder changesets by reordering the lines

Commands:
"""
    )
    actions = []

    def addverb(v):
        a = actiontable[v]
        lines = a.message.split(b"\n")
        if len(a.verbs):
            v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
        actions.append(b" %s = %s" % (v, lines[0]))
        actions.extend([b' %s'] * (len(lines) - 1))

    for v in (
        sorted(primaryactions)
        + sorted(secondaryactions)
        + sorted(tertiaryactions)
    ):
        addverb(v)
    actions.append(b'')

    hints = []
    if ui.configbool(b'histedit', b'dropmissing'):
        hints.append(
            b"Deleting a changeset from the list "
            b"will DISCARD it from the edited history!"
        )

    lines = (intro % (first, last)).split(b'\n') + actions + hints

    return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])


class histeditstate:
    def __init__(self, repo):
        self.repo = repo
        self.actions = None
        self.keep = None
        self.topmost = None
        self.parentctxnode = None
        self.lock = None
        self.wlock = None
        self.backupfile = None
        self.stateobj = statemod.cmdstate(repo, b'histedit-state')
        self.replacements = []

    def read(self):
        """Load histedit state from disk and set fields appropriately."""
        if not self.stateobj.exists():
            cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))

        data = self._read()

        self.parentctxnode = data[b'parentctxnode']
        actions = parserules(data[b'rules'], self)
        self.actions = actions
        self.keep = data[b'keep']
        self.topmost = data[b'topmost']
        self.replacements = data[b'replacements']
        self.backupfile = data[b'backupfile']

    def _read(self):
        fp = self.repo.vfs.read(b'histedit-state')
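        # Two on-disk formats exist: the current plain-text 'v1' format
        # written by _write() below, and a legacy pickled tuple that
        # predates it and carries no backupfile field.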
        if fp.startswith(b'v1\n'):
            data = self._load()
            parentctxnode, rules, keep, topmost, replacements, backupfile = data
        else:
            data = pickle.loads(fp)
            parentctxnode, rules, keep, topmost, replacements = data
            backupfile = None
        rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])

        return {
            b'parentctxnode': parentctxnode,
            b"rules": rules,
            b"keep": keep,
            b"topmost": topmost,
            b"replacements": replacements,
            b"backupfile": backupfile,
        }

    def write(self, tr=None):
        if tr:
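            # Register a file generator so the state file is written in
            # the 'plain' vfs (.hg/, not the store) when the transaction
            # closes, instead of writing it immediately.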
            tr.addfilegenerator(
                b'histedit-state',
                (b'histedit-state',),
                self._write,
                location=b'plain',
            )
        else:
            with self.repo.vfs(b"histedit-state", b"w") as f:
                self._write(f)

    def _write(self, fp):
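        # v1 layout, one field per line: version marker, parent node hex,
        # topmost node hex, keep flag, the number of actions followed by
        # the two-line tostate() record of each, the number of replacements
        # followed by one record per replacement, then the backup file name.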
        fp.write(b'v1\n')
        fp.write(b'%s\n' % hex(self.parentctxnode))
        fp.write(b'%s\n' % hex(self.topmost))
        fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
        fp.write(b'%d\n' % len(self.actions))
        for action in self.actions:
            fp.write(b'%s\n' % action.tostate())
        fp.write(b'%d\n' % len(self.replacements))
        for replacement in self.replacements:
            fp.write(
                b'%s%s\n'
                % (
                    hex(replacement[0]),
                    b''.join(hex(r) for r in replacement[1]),
                )
            )
        backupfile = self.backupfile
        if not backupfile:
            backupfile = b''
        fp.write(b'%s\n' % backupfile)

    def _load(self):
        fp = self.repo.vfs(b'histedit-state', b'r')
        lines = [l[:-1] for l in fp.readlines()]

        index = 0
        lines[index]  # version number
        index += 1

        parentctxnode = bin(lines[index])
        index += 1

        topmost = bin(lines[index])
        index += 1

        keep = lines[index] == b'True'
        index += 1

        # Rules
        rules = []
        rulelen = int(lines[index])
        index += 1
        for i in range(rulelen):
            ruleaction = lines[index]
            index += 1
            rule = lines[index]
            index += 1
            rules.append((ruleaction, rule))

        # Replacements
        replacements = []
        replacementlen = int(lines[index])
        index += 1
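        # Each replacement record is the replaced node's 40-character hex
        # hash immediately followed by the concatenated 40-character hex
        # hashes of its successors (possibly none).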
        for i in range(replacementlen):
            replacement = lines[index]
            original = bin(replacement[:40])
            succ = [
                bin(replacement[i : i + 40])
                for i in range(40, len(replacement), 40)
            ]
            replacements.append((original, succ))
            index += 1

        backupfile = lines[index]
        index += 1

        fp.close()

        return parentctxnode, rules, keep, topmost, replacements, backupfile

    def clear(self):
        if self.inprogress():
            self.repo.vfs.unlink(b'histedit-state')

    def inprogress(self):
        return self.repo.vfs.exists(b'histedit-state')


class histeditaction:
    def __init__(self, state, node):
        self.state = state
        self.repo = state.repo
        self.node = node

    @classmethod
    def fromrule(cls, state, rule):
        """Parses the given rule, returning an instance of the histeditaction."""
        ruleid = rule.strip().split(b' ', 1)[0]
        # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
        # Check for validation of rule ids and get the rulehash
        try:
            rev = bin(ruleid)
        except binascii.Error:
            try:
                _ctx = scmutil.revsingle(state.repo, ruleid)
                rulehash = _ctx.hex()
                rev = bin(rulehash)
            except error.RepoLookupError:
                raise error.ParseError(_(b"invalid changeset %s") % ruleid)
        return cls(state, rev)

    def verify(self, prev, expected, seen):
        """Verifies semantic correctness of the rule"""
        repo = self.repo
        ha = hex(self.node)
        self.node = scmutil.resolvehexnodeidprefix(repo, ha)
        if self.node is None:
            raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
        self._verifynodeconstraints(prev, expected, seen)

    def _verifynodeconstraints(self, prev, expected, seen):
        # by default a command needs a node in the edited list
        if self.node not in expected:
            raise error.ParseError(
                _(b'%s "%s" changeset was not a candidate')
                % (self.verb, short(self.node)),
                hint=_(b'only use listed changesets'),
            )
        # and only one command per node
        if self.node in seen:
            raise error.ParseError(
                _(b'duplicated command for changeset %s') % short(self.node)
            )

    def torule(self):
        """build a histedit rule line for an action

        by default lines are in the form:
          <hash> <rev> <summary>
        """
        ctx = self.repo[self.node]
        ui = self.repo.ui
        # We don't want color codes in the commit message template, so
        # disable the label() template function while we render it.
        with ui.configoverride(
            {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
        ):
            summary = cmdutil.rendertemplate(
                ctx, ui.config(b'histedit', b'summary-template')
            )
        line = b'%s %s %s' % (self.verb, ctx, stringutil.firstline(summary))
        # trim to 75 columns by default so it's not stupidly wide in my editor
        # (the 5 more are left for verb)
        maxlen = self.repo.ui.configint(b'histedit', b'linelen')
        maxlen = max(maxlen, 22)  # avoid truncating hash
        return stringutil.ellipsis(line, maxlen)

    def tostate(self):
        """Print an action in the format used by histedit state files
        (the first line is the verb, the second is the node hash)
        """
        return b"%s\n%s" % (self.verb, hex(self.node))

    def run(self):
        """Runs the action. The default behavior is simply to apply the
        action's rulectx onto the current parentctx."""
        self.applychange()
        self.continuedirty()
        return self.continueclean()

    def applychange(self):
        """Applies the changes from this action's rulectx onto the current
        parentctx, but does not commit them."""
        repo = self.repo
        rulectx = repo[self.node]
        with repo.ui.silent():
            hg.update(repo, self.state.parentctxnode, quietempty=True)
        stats = applychanges(repo.ui, repo, rulectx, {})
        repo.dirstate.setbranch(rulectx.branch(), repo.currenttransaction())
        if stats.unresolvedcount:
            raise error.InterventionRequired(
                _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
                hint=_(b'hg histedit --continue to resume'),
            )

    def continuedirty(self):
        """Continues the action when changes have been applied to the working
        copy. The default behavior is to commit the dirty changes."""
        repo = self.repo
        rulectx = repo[self.node]

        editor = self.commiteditor()
        commit = commitfuncfor(repo, rulectx)
        if repo.ui.configbool(b'rewrite', b'update-timestamp'):
            date = dateutil.makedate()
        else:
            date = rulectx.date()
        commit(
            text=rulectx.description(),
            user=rulectx.user(),
            date=date,
            extra=rulectx.extra(),
            editor=editor,
        )

    def commiteditor(self):
        """The editor to be used to edit the commit message."""
        return False

    def continueclean(self):
        """Continues the action when the working copy is clean. The default
        behavior is to accept the current commit as the new version of the
        rulectx."""
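        # The replacements returned below map the edited node to its
        # successors; an empty successor tuple means the changeset was
        # dropped because applying it produced no changes.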
        ctx = self.repo[b'.']
        if ctx.node() == self.state.parentctxnode:
            self.repo.ui.warn(
                _(b'%s: skipping changeset (no changes)\n') % short(self.node)
            )
            return ctx, [(self.node, tuple())]
        if ctx.node() == self.node:
            # Nothing changed
            return ctx, []
        return ctx, [(self.node, (ctx.node(),))]


def commitfuncfor(repo, src):
    """Build a commit function for the replacement of <src>

    This function ensures we apply the same treatment to all changesets.

    - Add a 'histedit_source' entry in extra.

    Note that fold has its own separate logic because its handling is a bit
    different and not easily factored out of the fold method.
    """
    phasemin = src.phase()

    def commitfunc(**kwargs):
        overrides = {(b'phases', b'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, b'histedit'):
            extra = kwargs.get('extra', {}).copy()
            extra[b'histedit_source'] = src.hex()
            kwargs['extra'] = extra
            return repo.commit(**kwargs)

    return commitfunc


def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory"""
    if ctx.p1().node() == repo.dirstate.p1():
        # edits are "in place" so we do not need to make any merge;
        # just apply the changes on the parent for editing
        with ui.silent():
            cmdutil.revert(ui, repo, ctx, all=True)
        stats = mergemod.updateresult(0, 0, 0, 0)
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig(
                b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
            )
            stats = mergemod.graft(
                repo,
                ctx,
                labels=[
                    b'already edited',
                    b'current change',
                    b'parent of current change',
                ],
            )
        finally:
            repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
    return stats


def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
    """collapse the set of revisions from first to last as a new one.

    Expected commit options are:
        - message
        - date
        - username
    Commit message is edited in all cases.

    This function works in memory."""
    ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
    if not ctxs:
        return None
    for c in ctxs:
        if not c.mutable():
            raise error.ParseError(
                _(b"cannot fold into public change %s") % short(c.node())
            )
    base = firstctx.p1()

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, lastctx)

    # prune files which were reverted by the updates
    files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
    # commit version of these files as defined by head
    headmf = lastctx.manifest()

    def filectxfn(repo, ctx, path):
        if path in headmf:
            fctx = lastctx[path]
            flags = fctx.flags()
            mctx = context.memfilectx(
                repo,
                ctx,
                fctx.path(),
                fctx.data(),
                islink=b'l' in flags,
                isexec=b'x' in flags,
                copysource=copied.get(path),
            )
            return mctx
        return None

    if commitopts.get(b'message'):
        message = commitopts[b'message']
    else:
        message = firstctx.description()
    user = commitopts.get(b'user')
    date = commitopts.get(b'date')
    extra = commitopts.get(b'extra')

    parents = (firstctx.p1().node(), firstctx.p2().node())
    editor = None
    if not skipprompt:
        editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
    new = context.memctx(
        repo,
        parents=parents,
        text=message,
        files=files,
        filectxfn=filectxfn,
        user=user,
        date=date,
        extra=extra,
        editor=editor,
    )
    return repo.commitctx(new)


def _isdirtywc(repo):
    return repo[None].dirty(missing=True)


def abortdirty():
    raise error.StateError(
        _(b'working copy has pending changes'),
        hint=_(
            b'amend, commit, or revert them and run histedit '
            b'--continue, or abort with histedit --abort'
        ),
    )


def action(verbs, message, priority=False, internal=False):
    def wrap(cls):
        assert not priority or not internal
        verb = verbs[0]
        if priority:
            primaryactions.add(verb)
        elif internal:
            internalactions.add(verb)
        elif len(verbs) > 1:
            secondaryactions.add(verb)
        else:
            tertiaryactions.add(verb)

        cls.verb = verb
        cls.verbs = verbs
        cls.message = message
        for verb in verbs:
            actiontable[verb] = cls
        return cls

    return wrap


@action([b'pick', b'p'], _(b'use commit'), priority=True)
class pick(histeditaction):
    def run(self):
        rulectx = self.repo[self.node]
        if rulectx.p1().node() == self.state.parentctxnode:
            self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
            return rulectx, []

        return super(pick, self).run()


@action(
    [b'edit', b'e'],
    _(b'use commit, but allow edits before making new commit'),
    priority=True,
)
class edit(histeditaction):
    def run(self):
        repo = self.repo
        rulectx = repo[self.node]
        hg.update(repo, self.state.parentctxnode, quietempty=True)
        applychanges(repo.ui, repo, rulectx, {})
        hint = _(b'to edit %s, `hg histedit --continue` after making changes')
        raise error.InterventionRequired(
            _(b'Editing (%s), commit as needed now to split the change')
            % short(self.node),
            hint=hint % short(self.node),
        )

    def commiteditor(self):
        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')


@action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
class fold(histeditaction):
    def verify(self, prev, expected, seen):
        """Verifies semantic correctness of the fold rule"""
        super(fold, self).verify(prev, expected, seen)
        repo = self.repo
        if not prev:
            c = repo[self.node].p1()
        elif prev.verb not in (b'pick', b'base'):
            return
        else:
            c = repo[prev.node]
        if not c.mutable():
            raise error.ParseError(
                _(b"cannot fold into public change %s") % short(c.node())
            )

    def continuedirty(self):
        repo = self.repo
        rulectx = repo[self.node]

        commit = commitfuncfor(repo, rulectx)
        commit(
            text=b'fold-temp-revision %s' % short(self.node),
            user=rulectx.user(),
            date=rulectx.date(),
            extra=rulectx.extra(),
        )

    def continueclean(self):
        repo = self.repo
        ctx = repo[b'.']
        rulectx = repo[self.node]
        parentctxnode = self.state.parentctxnode
        if ctx.node() == parentctxnode:
            repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
            return ctx, [(self.node, (parentctxnode,))]

        parentctx = repo[parentctxnode]
866 newcommits = {
866 newcommits = {
867 c.node()
867 c.node()
868 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
868 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
869 }
869 }
870 if not newcommits:
870 if not newcommits:
871 repo.ui.warn(
871 repo.ui.warn(
872 _(
872 _(
873 b'%s: cannot fold - working copy is not a '
873 b'%s: cannot fold - working copy is not a '
874 b'descendant of previous commit %s\n'
874 b'descendant of previous commit %s\n'
875 )
875 )
876 % (short(self.node), short(parentctxnode))
876 % (short(self.node), short(parentctxnode))
877 )
877 )
878 return ctx, [(self.node, (ctx.node(),))]
878 return ctx, [(self.node, (ctx.node(),))]
879
879
880 middlecommits = newcommits.copy()
880 middlecommits = newcommits.copy()
881 middlecommits.discard(ctx.node())
881 middlecommits.discard(ctx.node())
882
882
883 return self.finishfold(
883 return self.finishfold(
884 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
884 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
885 )
885 )
886
886
887 def skipprompt(self):
887 def skipprompt(self):
888 """Returns true if the rule should skip the message editor.
888 """Returns true if the rule should skip the message editor.
889
889
890 For example, 'fold' wants to show an editor, but 'rollup'
890 For example, 'fold' wants to show an editor, but 'rollup'
891 doesn't want to.
891 doesn't want to.
892 """
892 """
893 return False
893 return False
894
894
895 def mergedescs(self):
895 def mergedescs(self):
896 """Returns true if the rule should merge messages of multiple changes.
896 """Returns true if the rule should merge messages of multiple changes.
897
897
898 This exists mainly so that 'rollup' rules can be a subclass of
898 This exists mainly so that 'rollup' rules can be a subclass of
899 'fold'.
899 'fold'.
900 """
900 """
901 return True
901 return True
902
902
903 def firstdate(self):
903 def firstdate(self):
904 """Returns true if the rule should preserve the date of the first
904 """Returns true if the rule should preserve the date of the first
905 change.
905 change.
906
906
907 This exists mainly so that 'rollup' rules can be a subclass of
907 This exists mainly so that 'rollup' rules can be a subclass of
908 'fold'.
908 'fold'.
909 """
909 """
910 return False
910 return False
911
911
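    # finishfold() produces the (context, replacements) pair that actions
    # return: each replacement maps an old node to the tuple of nodes that
    # supersede it. Here everything that took part in the fold ends up
    # mapped to the single collapsed commit n.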
    def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
        mergemod.update(ctx.p1())
        ### prepare new commit data
        commitopts = {}
        commitopts[b'user'] = ctx.user()
        # commit message
        if not self.mergedescs():
            newmessage = ctx.description()
        else:
            newmessage = (
                b'\n***\n'.join(
                    [ctx.description()]
                    + [repo[r].description() for r in internalchanges]
                    + [oldctx.description()]
                )
                + b'\n'
            )
        commitopts[b'message'] = newmessage
        # date
        if self.firstdate():
            commitopts[b'date'] = ctx.date()
        else:
            commitopts[b'date'] = max(ctx.date(), oldctx.date())
        # if date is to be updated to current
        if ui.configbool(b'rewrite', b'update-timestamp'):
            commitopts[b'date'] = dateutil.makedate()

        extra = ctx.extra().copy()
        # histedit_source
        # note: ctx is likely a temporary commit but that's the best we can do
        # here. This is sufficient to solve issue3681 anyway.
        extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
        commitopts[b'extra'] = extra
        phasemin = max(ctx.phase(), oldctx.phase())
        overrides = {(b'phases', b'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, b'histedit'):
            n = collapse(
                repo,
                ctx,
                repo[newnode],
                commitopts,
                skipprompt=self.skipprompt(),
            )
        if n is None:
            return ctx, []
        mergemod.update(repo[n])
        replacements = [
            (oldctx.node(), (newnode,)),
            (ctx.node(), (n,)),
            (newnode, (n,)),
        ]
        for ich in internalchanges:
            replacements.append((ich, (n,)))
        return repo[n], replacements


@action(
    [b'base', b'b'],
    _(b'checkout changeset and apply further changesets from there'),
)
class base(histeditaction):
    def run(self):
        if self.repo[b'.'].node() != self.node:
            mergemod.clean_update(self.repo[self.node])
        return self.continueclean()

    def continuedirty(self):
        abortdirty()

    def continueclean(self):
        basectx = self.repo[b'.']
        return basectx, []

    def _verifynodeconstraints(self, prev, expected, seen):
        # base can only be used with a node not in the edited set
        if self.node in expected:
            msg = _(b'%s "%s" changeset was an edited list candidate')
            raise error.ParseError(
                msg % (self.verb, short(self.node)),
                hint=_(b'base must only use unlisted changesets'),
            )


@action(
    [b'_multifold'],
    _(
        b"""fold subclass used when multiple folds happen in a row

    We only want to fire the editor for the folded message once when
    (say) four changes are folded down into a single change. This is
    similar to rollup, but we should preserve both messages so that
    when the last fold operation runs we can show the user all the
    commit messages in their editor.
    """
    ),
    internal=True,
)
class _multifold(fold):
    def skipprompt(self):
        return True


@action(
    [b"roll", b"r"],
    _(b"like fold, but discard this commit's description and date"),
)
class rollup(fold):
    def mergedescs(self):
        return False

    def skipprompt(self):
        return True

    def firstdate(self):
        return True


@action([b"drop", b"d"], _(b'remove commit from history'))
class drop(histeditaction):
    def run(self):
        parentctx = self.repo[self.state.parentctxnode]
        return parentctx, [(self.node, tuple())]


@action(
    [b"mess", b"m"],
    _(b'edit commit message without changing commit content'),
    priority=True,
)
class message(histeditaction):
    def commiteditor(self):
        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')


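# Support for `histedit --outgoing`: findoutgoing() locates the unique root
# of the changesets missing from the push destination, which then serves as
# the ancestor revision to edit.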
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code"""
    if opts is None:
        opts = {}
    path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)

    ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))

    revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
    other = hg.peer(repo, opts, path)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.StateError(_(b'no outgoing ancestors'))
    roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
    if len(roots) > 1:
        msg = _(b'there are ambiguous outgoing revisions')
        hint = _(b"see 'hg help histedit' for more detail")
        raise error.StateError(msg, hint=hint)
    return repo[roots[0]].node()


# Curses Support
try:
    import curses
except ImportError:
    curses = None

KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
ACTION_LABELS = {
    b'fold': b'^fold',
    b'roll': b'^roll',
}

COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11

E_QUIT, E_HISTEDIT = 1, 2
E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3

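# Key bindings for the curses interface. Lookups are two-level: event()
# first consults the table for the current mode and then falls back to the
# b'global' table, so mode-specific bindings shadow global ones (e.g. 'j'
# scrolls by one line in MODE_PATCH but moves the cursor in MODE_RULES).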
KEYTABLE = {
    b'global': {
        b'h': b'next-action',
        b'KEY_RIGHT': b'next-action',
        b'l': b'prev-action',
        b'KEY_LEFT': b'prev-action',
        b'q': b'quit',
        b'c': b'histedit',
        b'C': b'histedit',
        b'v': b'showpatch',
        b'?': b'help',
    },
    MODE_RULES: {
        b'd': b'action-drop',
        b'e': b'action-edit',
        b'f': b'action-fold',
        b'm': b'action-mess',
        b'p': b'action-pick',
        b'r': b'action-roll',
        b' ': b'select',
        b'j': b'down',
        b'k': b'up',
        b'KEY_DOWN': b'down',
        b'KEY_UP': b'up',
        b'J': b'move-down',
        b'K': b'move-up',
        b'KEY_NPAGE': b'move-down',
        b'KEY_PPAGE': b'move-up',
        b'0': b'goto',  # Used for 0..9
    },
    MODE_PATCH: {
        b' ': b'page-down',
        b'KEY_NPAGE': b'page-down',
        b'KEY_PPAGE': b'page-up',
        b'j': b'line-down',
        b'k': b'line-up',
        b'KEY_DOWN': b'line-down',
        b'KEY_UP': b'line-up',
        b'J': b'down',
        b'K': b'up',
    },
    MODE_HELP: {},
}

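# TIOCGWINSZ fills in a winsize struct; only the first two shorts (rows,
# columns) matter here, hence the four-byte buffer and the b'hh' unpack
# format.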
def screen_size():
    return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b'    '))


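# One histeditrule per changeset being edited. origpos is the position the
# rule started out at (it is what the 0-9 'goto' keys refer to), while pos
# tracks the rule's current position as entries are swapped around.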
class histeditrule:
    def __init__(self, ui, ctx, pos, action=b'pick'):
        self.ui = ui
        self.ctx = ctx
        self.action = action
        self.origpos = pos
        self.pos = pos
        self.conflicts = []

    def __bytes__(self):
        # Example display of several histeditrules:
        #
        #  #10 pick   316392:06a16c25c053   add option to skip tests
        #  #11 ^roll  316393:71313c964cc5   <RED>oops a fixup commit</RED>
        #  #12 pick   316394:ab31f3973b0d   include mfbt for mozilla-config.h
        #  #13 ^fold  316395:14ce5803f4c3   fix warnings
        #
        # The carets point to the changeset being folded into ("roll this
        # changeset into the changeset above").
        return b'%s%s' % (self.prefix, self.desc)

    __str__ = encoding.strmethod(__bytes__)

    @property
    def prefix(self):
        # Some actions ('fold' and 'roll') combine a patch with a
        # previous one. Add a marker showing which patch they apply
        # to.
        action = ACTION_LABELS.get(self.action, self.action)

        h = self.ctx.hex()[0:12]
        r = self.ctx.rev()

        return b"#%s %s %d:%s " % (
            (b'%d' % self.origpos).ljust(2),
            action.ljust(6),
            r,
            h,
        )

    @util.propertycache
    def desc(self):
        summary = cmdutil.rendertemplate(
            self.ctx, self.ui.config(b'histedit', b'summary-template')
        )
        if summary:
            return summary
        # This is split off from the prefix property so that we can
        # separately make the description for 'roll' red (since it
        # will get discarded).
        return stringutil.firstline(self.ctx.description())

    def checkconflicts(self, other):
        if other.pos > self.pos and other.origpos <= self.origpos:
            if set(other.ctx.files()) & set(self.ctx.files()) != set():
                self.conflicts.append(other)
            return self.conflicts

        if other in self.conflicts:
            self.conflicts.remove(other)
        return self.conflicts


def makecommands(rules):
    """Returns a list of commands consumable by histedit --commands based on
    our list of rules"""
    commands = []
    for rule in rules:
        commands.append(b'%s %s\n' % (rule.action, rule.ctx))
    return commands


def addln(win, y, x, line, color=None):
    """Add a line to the given window, left-padded but 100% filled with
    whitespace characters, so that the color appears on the whole line"""
    maxy, maxx = win.getmaxyx()
    length = maxx - 1 - x
    line = bytes(line).ljust(length)[:length]
    if y < 0:
        y = maxy + y
    if x < 0:
        x = maxx + x
    if color:
        win.addstr(y, x, line, color)
    else:
        win.addstr(y, x, line)


def _trunc_head(line, n):
    if len(line) <= n:
        return line
    return b'> ' + line[-(n - 2) :]


def _trunc_tail(line, n):
    if len(line) <= n:
        return line
    return line[: n - 2] + b' >'


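# _chistedit_state carries everything the curses UI needs between key
# presses: the rule list, the (current, previous) mode pair used to toggle
# the patch and help views, and a per-mode scroll offset so the rules list
# and the patch view keep independent scroll positions.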
class _chistedit_state:
    def __init__(
        self,
        repo,
        rules,
        stdscr,
    ):
        self.repo = repo
        self.rules = rules
        self.stdscr = stdscr
        self.later_on_top = repo.ui.configbool(
            b'histedit', b'later-commits-first'
        )
        # The current item in display order, initialized to point to the top
        # of the screen.
        self.pos = 0
        self.selected = None
        self.mode = (MODE_INIT, MODE_INIT)
        self.page_height = None
        self.modes = {
            MODE_RULES: {
                b'line_offset': 0,
            },
            MODE_PATCH: {
                b'line_offset': 0,
            },
        }

    def render_commit(self, win):
        """Renders the commit window that shows the log of the currently
        selected commit"""
        rule = self.rules[self.display_pos_to_rule_pos(self.pos)]

        ctx = rule.ctx
        win.box()

        maxy, maxx = win.getmaxyx()
        length = maxx - 3

        line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
        win.addstr(1, 1, line[:length])

        line = b"user:      %s" % ctx.user()
        win.addstr(2, 1, line[:length])

        bms = self.repo.nodebookmarks(ctx.node())
        line = b"bookmark:  %s" % b' '.join(bms)
        win.addstr(3, 1, line[:length])

        line = b"summary:   %s" % stringutil.firstline(ctx.description())
        win.addstr(4, 1, line[:length])

        line = b"files:     "
        win.addstr(5, 1, line)
        fnx = 1 + len(line)
        fnmaxx = length - fnx + 1
        y = 5
        fnmaxn = maxy - (1 + y) - 1
        files = ctx.files()
        for i, line1 in enumerate(files):
            if len(files) > fnmaxn and i == fnmaxn - 1:
                win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
                y = y + 1
                break
            win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
            y = y + 1

        conflicts = rule.conflicts
        if len(conflicts) > 0:
            conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
            conflictstr = b"changed files overlap with %s" % conflictstr
        else:
            conflictstr = b'no overlap'

        win.addstr(y, 1, conflictstr[:length])
        win.noutrefresh()

    def helplines(self):
        if self.mode[0] == MODE_PATCH:
            help = b"""\
?: help, k/up: line up, j/down: line down, v: stop viewing patch
pgup: prev page, space/pgdn: next page, c: commit, q: abort
"""
        else:
            help = b"""\
?: help, k/up: move up, j/down: move down, space: select, v: view patch
d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
"""
        if self.later_on_top:
            help += b"Newer commits are shown above older commits.\n"
        else:
            help += b"Older commits are shown above newer commits.\n"
        return help.splitlines()

    def render_help(self, win):
        maxy, maxx = win.getmaxyx()
        for y, line in enumerate(self.helplines()):
            if y >= maxy:
                break
            addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
        win.noutrefresh()

    def layout(self):
        maxy, maxx = self.stdscr.getmaxyx()
        helplen = len(self.helplines())
        mainlen = maxy - helplen - 12
        if mainlen < 1:
            raise error.Abort(
                _(b"terminal dimensions %d by %d too small for curses histedit")
                % (maxy, maxx),
                hint=_(
                    b"enlarge your terminal or use --config ui.interface=text"
                ),
            )
        return {
            b'commit': (12, maxx),
            b'help': (helplen, maxx),
            b'main': (mainlen, maxx),
        }

    def display_pos_to_rule_pos(self, display_pos):
        """Converts a position in display order to rule order.

        The `display_pos` is the order from the top in display order, not
        considering which items are currently visible on the screen. Thus,
        `display_pos=0` is the item at the top (possibly after scrolling to
        the top).
        """
        if self.later_on_top:
            return len(self.rules) - 1 - display_pos
        else:
            return display_pos

    def render_rules(self, rulesscr):
        start = self.modes[MODE_RULES][b'line_offset']

        conflicts = [r.ctx for r in self.rules if r.conflicts]
        if len(conflicts) > 0:
            line = b"potential conflict in %s" % b','.join(
                map(pycompat.bytestr, conflicts)
            )
            addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))

        for display_pos in range(start, len(self.rules)):
            y = display_pos - start
            if y < 0 or y >= self.page_height:
                continue
            rule_pos = self.display_pos_to_rule_pos(display_pos)
            rule = self.rules[rule_pos]
            if len(rule.conflicts) > 0:
                rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
            else:
                rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)

            if display_pos == self.selected:
                rollcolor = COLOR_ROLL_SELECTED
                addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
            elif display_pos == self.pos:
                rollcolor = COLOR_ROLL_CURRENT
                addln(
                    rulesscr,
                    y,
                    2,
                    rule,
                    curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
                )
            else:
                rollcolor = COLOR_ROLL
                addln(rulesscr, y, 2, rule)

            if rule.action == b'roll':
                rulesscr.addstr(
                    y,
                    2 + len(rule.prefix),
                    rule.desc,
                    curses.color_pair(rollcolor),
                )

        rulesscr.noutrefresh()

    def render_string(self, win, output, diffcolors=False):
        maxy, maxx = win.getmaxyx()
        length = min(maxy - 1, len(output))
        for y in range(0, length):
            line = output[y]
            if diffcolors:
                if line.startswith(b'+'):
                    win.addstr(
                        y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
                    )
                elif line.startswith(b'-'):
                    win.addstr(
                        y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
                    )
                elif line.startswith(b'@@ '):
                    win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
                else:
                    win.addstr(y, 0, line)
            else:
                win.addstr(y, 0, line)
        win.noutrefresh()

    def render_patch(self, win):
        start = self.modes[MODE_PATCH][b'line_offset']
        content = self.modes[MODE_PATCH][b'patchcontents']
        self.render_string(win, content[start:], diffcolors=True)

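    # In event() below, any digit key is collapsed to b'0' before the
    # KEYTABLE lookup, so the single b'0': b'goto' binding covers the keys
    # 0-9; the handler then recovers the actual digit from ch to pick the
    # target rule.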
    def event(self, ch):
        """Change state based on the current character input

        This takes the current state and, based on the current character
        input from the user, changes the state accordingly.
        """
        oldpos = self.pos

        if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
            return E_RESIZE

        lookup_ch = ch
        if ch is not None and b'0' <= ch <= b'9':
            lookup_ch = b'0'

        curmode, prevmode = self.mode
        action = KEYTABLE[curmode].get(
            lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
        )
        if action is None:
            return
        if action in (b'down', b'move-down'):
            newpos = min(oldpos + 1, len(self.rules) - 1)
            self.move_cursor(oldpos, newpos)
            if self.selected is not None or action == b'move-down':
                self.swap(oldpos, newpos)
        elif action in (b'up', b'move-up'):
            newpos = max(0, oldpos - 1)
            self.move_cursor(oldpos, newpos)
            if self.selected is not None or action == b'move-up':
                self.swap(oldpos, newpos)
        elif action == b'next-action':
            self.cycle_action(oldpos, next=True)
        elif action == b'prev-action':
            self.cycle_action(oldpos, next=False)
        elif action == b'select':
            self.selected = oldpos if self.selected is None else None
            self.make_selection(self.selected)
        elif action == b'goto' and int(ch) < len(self.rules) <= 10:
            newrule = next((r for r in self.rules if r.origpos == int(ch)))
            self.move_cursor(oldpos, newrule.pos)
            if self.selected is not None:
                self.swap(oldpos, newrule.pos)
        elif action.startswith(b'action-'):
            self.change_action(oldpos, action[7:])
        elif action == b'showpatch':
            self.change_mode(MODE_PATCH if curmode != MODE_PATCH else prevmode)
        elif action == b'help':
            self.change_mode(MODE_HELP if curmode != MODE_HELP else prevmode)
        elif action == b'quit':
            return E_QUIT
        elif action == b'histedit':
            return E_HISTEDIT
        elif action == b'page-down':
            return E_PAGEDOWN
        elif action == b'page-up':
            return E_PAGEUP
        elif action == b'line-down':
            return E_LINEDOWN
        elif action == b'line-up':
            return E_LINEUP

    def patch_contents(self):
        repo = self.repo
        rule = self.rules[self.display_pos_to_rule_pos(self.pos)]
        displayer = logcmdutil.changesetdisplayer(
            repo.ui,
            repo,
            {b"patch": True, b"template": b"status"},
            buffered=True,
        )
        overrides = {(b'ui', b'verbose'): True}
        with repo.ui.configoverride(overrides, source=b'histedit'):
            displayer.show(rule.ctx)
            displayer.close()
        return displayer.hunk[rule.ctx.rev()].splitlines()

    def move_cursor(self, oldpos, newpos):
        """Change the rule/changeset that the cursor is pointing to, regardless
        of current mode (you can switch between patches from the view patch
        window).
        """
        self.pos = newpos

        mode, _ = self.mode
        if mode == MODE_RULES:
            # Scroll through the list by updating the view for MODE_RULES, so
            # that even if we are not currently viewing the rules, switching
            # back will result in the cursor's rule being visible.
            modestate = self.modes[MODE_RULES]
            if newpos < modestate[b'line_offset']:
                modestate[b'line_offset'] = newpos
            elif newpos > modestate[b'line_offset'] + self.page_height - 1:
                modestate[b'line_offset'] = newpos - self.page_height + 1

        # Reset the patch view region to the top of the new patch.
        self.modes[MODE_PATCH][b'line_offset'] = 0

    def change_mode(self, mode):
        curmode, _ = self.mode
        self.mode = (mode, curmode)
        if mode == MODE_PATCH:
            self.modes[MODE_PATCH][b'patchcontents'] = self.patch_contents()

    def make_selection(self, pos):
        self.selected = pos

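    # Only rules between the old and the new cursor position can gain or
    # lose conflicts when two neighbours are swapped, which is what keeps
    # swap() linear in the distance moved rather than in the total number
    # of rules.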
    def swap(self, oldpos, newpos):
        """Swap two positions and calculate necessary conflicts in
        O(|newpos-oldpos|) time"""
        old_rule_pos = self.display_pos_to_rule_pos(oldpos)
        new_rule_pos = self.display_pos_to_rule_pos(newpos)

        rules = self.rules
        assert 0 <= old_rule_pos < len(rules) and 0 <= new_rule_pos < len(rules)

        rules[old_rule_pos], rules[new_rule_pos] = (
            rules[new_rule_pos],
            rules[old_rule_pos],
        )

        # TODO: swap should not know about histeditrule's internals
        rules[new_rule_pos].pos = new_rule_pos
        rules[old_rule_pos].pos = old_rule_pos

        start = min(old_rule_pos, new_rule_pos)
        end = max(old_rule_pos, new_rule_pos)
        for r in range(start, end + 1):
            rules[new_rule_pos].checkconflicts(rules[r])
            rules[old_rule_pos].checkconflicts(rules[r])

        if self.selected:
            self.make_selection(newpos)

    def change_action(self, pos, action):
        """Change the action state on the given position to the new action"""
        rule_pos = self.display_pos_to_rule_pos(pos)
        assert 0 <= rule_pos < len(self.rules)
        self.rules[rule_pos].action = action

    def cycle_action(self, pos, next=False):
        """Changes the action to the next or the previous action from
        the action list"""
        rule_pos = self.display_pos_to_rule_pos(pos)
        assert 0 <= rule_pos < len(self.rules)
        current = self.rules[rule_pos].action

        assert current in KEY_LIST

        index = KEY_LIST.index(current)
        if next:
            index += 1
        else:
            index -= 1
        # using pos instead of rule_pos because change_action() also calls
        # display_pos_to_rule_pos()
        self.change_action(pos, KEY_LIST[index % len(KEY_LIST)])

    def change_view(self, delta, unit):
        """Change the region of whatever is being viewed (a patch or the list
        of changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or
        'line'.
        """
        mode, _ = self.mode
        if mode != MODE_PATCH:
            return
        mode_state = self.modes[mode]
        num_lines = len(mode_state[b'patchcontents'])
        page_height = self.page_height
        unit = page_height if unit == b'page' else 1
        num_pages = 1 + (num_lines - 1) // page_height
        max_offset = (num_pages - 1) * page_height
        newline = mode_state[b'line_offset'] + delta * unit
        mode_state[b'line_offset'] = max(0, min(max_offset, newline))


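# Main curses driver, run under curses.wrapper(): set up the color pairs,
# then loop reading one key at a time, feeding it to the state machine and
# redrawing the help/main/commit panes until the user either confirms (the
# rule list is returned) or quits (False is returned).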
def _chisteditmain(repo, rules, stdscr):
    try:
        curses.use_default_colors()
    except curses.error:
        pass

    # initialize color pattern
    curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
    curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
    curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
    curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
    curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
    curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
    curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
    curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
    curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
    curses.init_pair(
        COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
    )
    curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)

    # don't display the cursor
    try:
        curses.curs_set(0)
    except curses.error:
        pass

    def drawvertwin(size, y, x):
        win = curses.newwin(size[0], size[1], y, x)
        y += size[0]
        return win, y, x

    state = _chistedit_state(repo, rules, stdscr)

    # eventloop
    ch = None
    stdscr.clear()
    stdscr.refresh()
    while True:
        oldmode, unused = state.mode
        if oldmode == MODE_INIT:
            state.change_mode(MODE_RULES)
        e = state.event(ch)

        if e == E_QUIT:
            return False
        if e == E_HISTEDIT:
            return state.rules
        else:
            if e == E_RESIZE:
                size = screen_size()
                if size != stdscr.getmaxyx():
                    curses.resizeterm(*size)

            sizes = state.layout()
            curmode, unused = state.mode
            if curmode != oldmode:
                state.page_height = sizes[b'main'][0]
                # Adjust the view to fit the current screen size.
                state.move_cursor(state.pos, state.pos)

            # Pack the windows against the top, each pane spread across the
            # full width of the screen.
            y, x = (0, 0)
            helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
            mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
            commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)

            if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
                if e == E_PAGEDOWN:
                    state.change_view(+1, b'page')
                elif e == E_PAGEUP:
                    state.change_view(-1, b'page')
                elif e == E_LINEDOWN:
                    state.change_view(+1, b'line')
                elif e == E_LINEUP:
                    state.change_view(-1, b'line')

            # start rendering
            commitwin.erase()
            helpwin.erase()
            mainwin.erase()
            if curmode == MODE_PATCH:
                state.render_patch(mainwin)
            elif curmode == MODE_HELP:
                state.render_string(mainwin, __doc__.strip().splitlines())
            else:
                state.render_rules(mainwin)
                state.render_commit(commitwin)
            state.render_help(helpwin)
            curses.doupdate()
            # done rendering
            ch = encoding.strtolocal(stdscr.getkey())


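# The curses front end does not apply anything itself: once the user
# confirms, the chosen rules are serialized into a temporary 'chistedit'
# file in the repository vfs and handed off to _texthistedit() through
# opts[b'commands'].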
1719 def _chistedit(ui, repo, freeargs, opts):
1719 def _chistedit(ui, repo, state, freeargs, opts):
1720 """interactively edit changeset history via a curses interface
1720 """interactively edit changeset history via a curses interface
1721
1721
1722 Provides a ncurses interface to histedit. Press ? in chistedit mode
1722 Provides a ncurses interface to histedit. Press ? in chistedit mode
1723 to see an extensive help. Requires python-curses to be installed."""
1723 to see an extensive help. Requires python-curses to be installed."""

    if curses is None:
        raise error.Abort(_(b"Python curses library required"))

    # disable color
    ui._colormode = None

    try:
        keep = opts.get(b'keep')
        revs = opts.get(b'rev', [])[:]
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        revs.extend(freeargs)
        if not revs:
            defaultrev = destutil.desthistedit(ui, repo)
            if defaultrev is not None:
                revs.append(defaultrev)
        if len(revs) != 1:
            raise error.InputError(
                _(b'histedit requires exactly one ancestor revision')
            )

        rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.InputError(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

        topmost = repo.dirstate.p1()
        revs = between(repo, root, topmost, keep)
        if not revs:
            raise error.InputError(
                _(b'%s is not an ancestor of working directory') % short(root)
            )

        rules = []
        for i, r in enumerate(revs):
            rules.append(histeditrule(ui, repo[r], i))
        with util.with_lc_ctype():
            rc = curses.wrapper(functools.partial(_chisteditmain, repo, rules))
        if rc is False:
            ui.write(_(b"histedit aborted\n"))
            return 0
        if type(rc) is list:
            ui.status(_(b"performing changes\n"))
            rules = makecommands(rc)
            with repo.vfs(b'chistedit', b'w+') as fp:
                for r in rules:
                    fp.write(r)
                opts[b'commands'] = fp.name
            return _texthistedit(ui, repo, state, freeargs, opts)
    except KeyboardInterrupt:
        pass
    return -1


@command(
    b'histedit',
    [
        (
            b'',
            b'commands',
            b'',
            _(b'read history edits from the specified file'),
            _(b'FILE'),
        ),
        (b'c', b'continue', False, _(b'continue an edit already in progress')),
        (b'', b'edit-plan', False, _(b'edit remaining actions list')),
        (
            b'k',
            b'keep',
            False,
            _(b"don't strip old nodes after edit is complete"),
        ),
        (b'', b'abort', False, _(b'abort an edit in progress')),
        (b'o', b'outgoing', False, _(b'changesets not found in destination')),
        (
            b'f',
            b'force',
            False,
            _(b'force outgoing even for unrelated repositories'),
        ),
        (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
    ]
    + cmdutil.formatteropts,
    _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit a changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    - `base` to check out a changeset and apply further changesets from there

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or a similar revset specification
       instead of --outgoing to specify the edit target revision exactly in
       such an ambiguous situation. See :hg:`help revsets` for details about
       selecting revisions.

    .. container:: verbose

       Examples:

       - A number of changes have been made.
         Revision 3 is no longer needed.

         Start history editing from revision 3::

           hg histedit -r 3

         An editor opens, containing the list of revisions,
         with specific actions specified::

           pick 5339bf82f0ca 3 Zworgle the foobar
           pick 8ef592ce7cc4 4 Bedazzle the zerlog
           pick 0a9639fcda9d 5 Morgify the cromulancy

         Additional information about the possible actions
         to take appears below the list of revisions.

         To remove revision 3 from the history,
         its action (at the beginning of the relevant line)
         is changed to 'drop'::

           drop 5339bf82f0ca 3 Zworgle the foobar
           pick 8ef592ce7cc4 4 Bedazzle the zerlog
           pick 0a9639fcda9d 5 Morgify the cromulancy

       - A number of changes have been made.
         Revisions 2 and 4 need to be swapped.

         Start history editing from revision 2::

           hg histedit -r 2

         An editor opens, containing the list of revisions,
         with specific actions specified::

           pick 252a1af424ad 2 Blorb a morgwazzle
           pick 5339bf82f0ca 3 Zworgle the foobar
           pick 8ef592ce7cc4 4 Bedazzle the zerlog

         To swap revisions 2 and 4, their lines are swapped
         in the editor::

           pick 8ef592ce7cc4 4 Bedazzle the zerlog
           pick 5339bf82f0ca 3 Zworgle the foobar
           pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for an intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    opts = pycompat.byteskwargs(opts)

    state = histeditstate(repo)
    with repo.wlock() as wlock, repo.lock() as lock:
        state.wlock = wlock
        state.lock = lock
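        # the locks are stashed on the state object so the curses and the
        # plain-text paths below both run under this same wlock/lock pair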
        # kludge: _chistedit only works for starting an edit, not aborting
        # or continuing, so fall back to regular _texthistedit for those
        # operations.
        if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
            return _chistedit(ui, repo, state, freeargs, opts)
        return _texthistedit(ui, repo, state, freeargs, opts)


def _texthistedit(ui, repo, state, freeargs, opts):
    _histedit(ui, repo, state, freeargs, opts)


goalcontinue = b'continue'
goalabort = b'abort'
goaleditplan = b'edit-plan'
goalnew = b'new'


def _getgoal(opts):
    if opts.get(b'continue'):
        return goalcontinue
    if opts.get(b'abort'):
        return goalabort
    if opts.get(b'edit_plan'):
        return goaleditplan
    return goalnew


def _readfile(ui, path):
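    # a path of b'-' means: read the edit plan from stdin; the read is
    # wrapped in timeblockedsection so the time spent waiting on input is
    # accounted as blocked time rather than histedit's own work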
    if path == b'-':
        with ui.timeblockedsection(b'histedit'):
            return ui.fin.read()
    else:
        with open(path, b'rb') as f:
            return f.read()


def _validateargs(ui, repo, freeargs, opts, goal, rules, revs):
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.StateError(_(b'source has mq patches applied'))

    # basic argument incompatibility processing
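    # (illustrative invocations, not from the original source: e.g.
    # 'hg histedit --continue --rev 5' or 'hg histedit --abort tip' would
    # both be rejected by the checks below)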
    outg = opts.get(b'outgoing')
    editplan = opts.get(b'edit_plan')
    abort = opts.get(b'abort')
    force = opts.get(b'force')
    if force and not outg:
        raise error.InputError(_(b'--force only allowed with --outgoing'))
    if goal == b'continue':
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --continue'))
    elif goal == b'abort':
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --abort'))
    elif goal == b'edit-plan':
        if any((outg, revs, freeargs)):
            raise error.InputError(
                _(b'only --commands argument allowed with --edit-plan')
            )
    else:
        if outg:
            if revs:
                raise error.InputError(
                    _(b'no revisions allowed with --outgoing')
                )
            if len(freeargs) > 1:
                raise error.InputError(
                    _(b'only one repo argument allowed with --outgoing')
                )
        else:
            revs.extend(freeargs)
            if len(revs) == 0:
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.InputError(
                    _(b'histedit requires exactly one ancestor revision')
                )


def _histedit(ui, repo, state, freeargs, opts):
    fm = ui.formatter(b'histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get(b'rev', [])
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    rules = opts.get(b'commands', b'')
    state.keep = opts.get(b'keep', False)

    _validateargs(ui, repo, freeargs, opts, goal, rules, revs)

    hastags = False
    if revs:
        revs = logcmdutil.revrange(repo, revs)
        ctxs = [repo[rev] for rev in revs]
        for ctx in ctxs:
            tags = [tag for tag in ctx.tags() if tag != b'tip']
            if not hastags:
                hastags = len(tags)
    if hastags:
        if ui.promptchoice(
            _(
                b'warning: tags associated with the given'
                b' changeset will be lost after histedit.\n'
                b'do you want to continue (yN)? $$ &Yes $$ &No'
            ),
            default=1,
        ):
            raise error.CanceledError(_(b'histedit cancelled\n'))
    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state, nobackup=nobackup)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()


def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
        if action.verb == b'fold' and nextact and nextact.verb == b'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serializes.
    state.write()

    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool(b"histedit", b"singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction(b'histedit')
    progress = ui.makeprogress(
        _(b"editing"), unit=_(b'changes'), total=len(state.actions)
    )
    with progress, util.acceptintervention(tr):
        while state.actions:
            state.write(tr=tr)
            actobj = state.actions[0]
            progress.increment(item=actobj.torule())
            ui.debug(
                b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
            )
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

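    # one final write: every action has been popped off, so a later
    # --continue will see an empty plan instead of replaying anything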
    state.write()


def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session"""
    mergemod.update(repo[state.parentctxnode])

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.items():
            if not succs:
                ui.debug(b'histedit: %s is dropped\n' % short(prec))
            else:
                ui.debug(
                    b'histedit: %s is replaced by %s\n'
                    % (short(prec), short(succs[0]))
                )
                if len(succs) > 1:
                    m = b'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        mapping = {}

    for n in tmpnodes:
        if n in repo:
            mapping[n] = ()

    # remove entries about unknown nodes
    has_node = repo.unfiltered().changelog.index.has_node
    mapping = {
        k: v
        for k, v in mapping.items()
        if has_node(k) and all(has_node(n) for n in v)
    }
    scmutil.cleanupnodes(repo, mapping, b'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd(
        {
            hf(oldn): fl([hf(n) for n in newn], name=b'node')
            for oldn, newn in mapping.items()
        },
        key=b"oldnode",
        value=b"newnodes",
    )
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin(b'undo')):
        os.unlink(repo.sjoin(b'undo'))
    if repo.vfs.exists(b'histedit-last-edit.txt'):
        repo.vfs.unlink(b'histedit-last-edit.txt')


def _aborthistedit(ui, repo, state, nobackup=False):
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))

        # Recover our old commits if necessary
        if state.topmost not in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction(b'histedit.abort') as tr:
                bundle2.applybundle(
                    repo,
                    gen,
                    tr,
                    source=b'histedit',
                    url=b'bundle:' + backupfile,
                )

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs(
            b'parents() and (%n or %ln::)',
            state.parentctxnode,
            leafs | tmpnodes,
        ):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
        cleanupnode(ui, repo, leafs, nobackup=nobackup)
    except Exception:
        if state.inprogress():
            ui.warn(
                _(
                    b'warning: encountered an exception during histedit '
                    b'--abort; the repository may not have been completely '
                    b'cleaned up\n'
                )
            )
        raise
    finally:
        state.clear()



def hgaborthistedit(ui, repo):
    state = histeditstate(repo)
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    with repo.wlock() as wlock, repo.lock() as lock:
        state.wlock = wlock
        state.lock = lock
        _aborthistedit(ui, repo, state, nobackup=nobackup)


def _edithisteditplan(ui, repo, state, rules):
    state.read()
    if not rules:
        comment = geteditcomment(
            ui, short(state.parentctxnode), short(state.topmost)
        )
        rules = ruleeditor(repo, ui, state.actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    ctxs = [repo[act.node] for act in state.actions if act.node]
    warnverifyactions(ui, repo, actions, state, ctxs)
    state.actions = actions
    state.write()


def _newhistedit(ui, repo, state, revs, freeargs, opts):
    outg = opts.get(b'outgoing')
    rules = opts.get(b'commands', b'')
    force = opts.get(b'force')

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    topmost = repo.dirstate.p1()
    if outg:
        if freeargs:
            remote = freeargs[0]
        else:
            remote = None
        root = findoutgoing(ui, repo, remote, force, opts)
    else:
        rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.InputError(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

    revs = between(repo, root, topmost, state.keep)
    if not revs:
        raise error.InputError(
            _(b'%s is not an ancestor of working directory') % short(root)
        )

    ctxs = [repo[r] for r in revs]

    wctx = repo[None]
    # Please don't ask me why `ancestors` is this value. I figured it
    # out with print-debugging, not by actually understanding what the
    # merge code is doing. :(
    ancs = [repo[b'.']]
    # Sniff-test to make sure we won't collide with untracked files in
    # the working directory. If we don't do this, we can get a
    # collision after we've started histedit and backing out gets ugly
    # for everyone, especially the user.
    for c in [ctxs[0].p1()] + ctxs:
        try:
            mergemod.calculateupdates(
                repo,
                wctx,
                c,
                ancs,
                # These parameters were determined by print-debugging
                # what happens later on inside histedit.
                branchmerge=False,
                force=False,
                acceptremote=False,
                followcopies=False,
            )
        except error.Abort:
            raise error.StateError(
                _(
                    b"untracked files in working directory conflict with files in %s"
                )
                % c
            )

    if not rules:
        comment = geteditcomment(ui, short(root), short(topmost))
        actions = [pick(state, r) for r in revs]
        rules = ruleeditor(repo, ui, actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    warnverifyactions(ui, repo, actions, state, ctxs)

    parentctxnode = repo[root].p1().node()

    state.parentctxnode = parentctxnode
    state.actions = actions
    state.topmost = topmost
    state.replacements = []

    ui.log(
        b"histedit",
        b"%d actions to histedit\n",
        len(actions),
        histedit_num_actions=len(actions),
    )

    # Create a backup so we can always abort completely.
    backupfile = None
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        backupfile = repair.backupbundle(
            repo, [parentctxnode], [topmost], root, b'histedit'
        )
    state.backupfile = backupfile


def _getsummary(ctx):
    return stringutil.firstline(ctx.description())


def bootstrapcontinue(ui, state, opts):
    repo = state.repo

    ms = mergestatemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)

    if state.actions:
        actobj = state.actions.pop(0)

        if _isdirtywc(repo):
            actobj.continuedirty()
            if _isdirtywc(repo):
                abortdirty()

        parentctx, replacements = actobj.continueclean()

        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacements)

    return state


def between(repo, old, new, keep):
    """select and validate the set of revisions to edit

    When keep is false, the specified set can't have children."""
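    # b'%n::%n' expands to the revset 'old::new': old, new, and every
    # changeset between them along that ancestry path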
    revs = repo.revs(b'%n::%n', old, new)
    if revs and not keep:
        rewriteutil.precheck(repo, revs, b'edit')
        if repo.revs(b'(%ld) and merge()', revs):
            raise error.StateError(
                _(b'cannot edit history that contains merges')
            )
    return pycompat.maplist(repo.changelog.node, revs)


def ruleeditor(repo, ui, actions, editcomment=b""):
    """open an editor to edit rules

    rules are in the format [ [act, ctx], ...] like in state.rules
    """
    if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
        newact = util.sortdict()
        for act in actions:
            ctx = repo[act.node]
            summary = _getsummary(ctx)
            fword = summary.split(b' ', 1)[0].lower()
            added = False

            # if it doesn't end with the special character '!' just skip this
            if fword.endswith(b'!'):
                fword = fword[:-1]
                if fword in primaryactions | secondaryactions | tertiaryactions:
                    act.verb = fword
                    # get the target summary
                    tsum = summary[len(fword) + 1 :].lstrip()
                    # safe but slow: reverse iterate over the actions so we
                    # don't clash on two commits having the same summary
                    for na, l in reversed(list(newact.items())):
                        actx = repo[na.node]
                        asum = _getsummary(actx)
                        if asum == tsum:
                            added = True
                            l.append(act)
                            break

            if not added:
                newact[act] = []

        # copy over and flatten the new list
        actions = []
        for na, l in newact.items():
            actions.append(na)
            actions += l

    rules = b'\n'.join([act.torule() for act in actions])
    rules += b'\n\n'
    rules += editcomment
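    # at this point the buffer handed to the editor looks like, e.g.
    # (hashes borrowed from the histedit help text):
    #
    #   pick 252a1af424ad 2 Blorb a morgwazzle
    #   pick 5339bf82f0ca 3 Zworgle the foobar
    #
    # followed by the commented help block in editcomment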
    rules = ui.edit(
        rules,
        ui.username(),
        {b'prefix': b'histedit'},
        repopath=repo.path,
        action=b'histedit',
    )

    # Save edit rules in .hg/histedit-last-edit.txt in case
    # the user needs to ask for help after something
    # surprising happens.
    with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
        f.write(rules)

    return rules


def parserules(rules, state):
    """Read the histedit rules string and return list of action objects"""
    rules = [
        l
        for l in (r.strip() for r in rules.splitlines())
        if l and not l.startswith(b'#')
    ]
    actions = []
    for r in rules:
        if b' ' not in r:
            raise error.ParseError(_(b'malformed line "%s"') % r)
        verb, rest = r.split(b' ', 1)

        if verb not in actiontable:
            raise error.ParseError(_(b'unknown action "%s"') % verb)

        action = actiontable[verb].fromrule(state, rest)
        actions.append(action)
    return actions


def warnverifyactions(ui, repo, actions, state, ctxs):
    try:
        verifyactions(actions, state, ctxs)
    except error.ParseError:
        if repo.vfs.exists(b'histedit-last-edit.txt'):
            ui.warn(
                _(
                    b'warning: histedit rules saved '
                    b'to: .hg/histedit-last-edit.txt\n'
                )
            )
        raise


def verifyactions(actions, state, ctxs):
    """Verify that there exists exactly one action per given changeset and
    other constraints.

    Will abort if there are too many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.
    """
    expected = {c.node() for c in ctxs}
    seen = set()
    prev = None

    if actions and actions[0].verb in [b'roll', b'fold']:
        raise error.ParseError(
            _(b'first changeset cannot use verb "%s"') % actions[0].verb
        )

    for action in actions:
        action.verify(prev, expected, seen)
        prev = action
        if action.node is not None:
            seen.add(action.node)
    missing = sorted(expected - seen)  # sort to stabilize output

    if state.repo.ui.configbool(b'histedit', b'dropmissing'):
        if len(actions) == 0:
            raise error.ParseError(
                _(b'no rules provided'),
                hint=_(b'use strip extension to remove commits'),
            )

        drops = [drop(state, n) for n in missing]
        # put them at the beginning so they execute immediately and
        # don't show in the edit-plan in the future
        actions[:0] = drops
    elif missing:
        raise error.ParseError(
            _(b'missing rules for changeset %s') % short(missing[0]),
            hint=_(
                b'use "drop %s" to discard, see also: '
                b"'hg help -e histedit.config'"
            )
            % short(missing[0]),
        )


def adjustreplacementsfrommarkers(repo, oldreplacements):
    """Adjust replacements from obsolescence markers

    Replacements structure is originally generated based on
    histedit's state and does not account for changes that are
    not recorded there. This function fixes that by adding
    data read from obsolescence markers"""
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        return oldreplacements

    unfi = repo.unfiltered()
    get_rev = unfi.changelog.index.get_rev
    obsstore = repo.obsstore
    newreplacements = list(oldreplacements)
    oldsuccs = [r[1] for r in oldreplacements]
    # successors that have already been added to succstocheck once
    seensuccs = set().union(
        *oldsuccs
    )  # create a set from an iterable of tuples
    succstocheck = list(seensuccs)
    while succstocheck:
        n = succstocheck.pop()
        missing = get_rev(n) is None
        markers = obsstore.successors.get(n, ())
        if missing and not markers:
            # dead end, mark it as such
            newreplacements.append((n, ()))
        for marker in markers:
            nsuccs = marker[1]
            newreplacements.append((n, nsuccs))
            for nsucc in nsuccs:
                if nsucc not in seensuccs:
                    seensuccs.add(nsucc)
                    succstocheck.append(nsucc)

    return newreplacements


def processreplacement(state):
    """process the list of replacements to return

    1) the final mapping between original and created nodes
    2) the list of temporary nodes created by histedit
    3) the list of new commits created by histedit"""
    replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
    allsuccs = set()
    replaced = set()
    fullmapping = {}
    # initialize basic set
    # fullmapping records all operations recorded in replacement
    for rep in replacements:
        allsuccs.update(rep[1])
        replaced.add(rep[0])
        fullmapping.setdefault(rep[0], set()).update(rep[1])
    new = allsuccs - replaced
    tmpnodes = allsuccs & replaced
    # Reduce content of fullmapping into direct relations between original
    # nodes and the final nodes created during history editing.
    # Dropped changesets are replaced by an empty list.
    toproceed = set(fullmapping)
    final = {}
    while toproceed:
        for x in list(toproceed):
            succs = fullmapping[x]
            for s in list(succs):
                if s in toproceed:
                    # non final node with unknown closure
                    # We can't process this now
                    break
                elif s in final:
                    # non final node, replace with closure
                    succs.remove(s)
                    succs.update(final[s])
            else:
                final[x] = succs
                toproceed.remove(x)
    # remove tmpnodes from final mapping
    for n in tmpnodes:
        del final[n]
    # we expect all changes involved in final to exist in the repo
    # turn `final` into list (topologically sorted)
    get_rev = state.repo.changelog.index.get_rev
    for prec, succs in final.items():
        final[prec] = sorted(succs, key=get_rev)

    # compute the topmost element (necessary for bookmarks)
    if new:
        newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
    elif not final:
        # Nothing rewritten at all. We won't need `newtopmost`:
        # it is the same as `oldtopmost` and `processreplacement` knows it
        newtopmost = None
    else:
        # everybody died. The newtopmost is the parent of the root.
        r = state.repo.changelog.rev
        newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()

    return final, tmpnodes, new, newtopmost


def movetopmostbookmarks(repo, oldtopmost, newtopmost):
    """Move bookmarks from oldtopmost to the newly created topmost

    This is arguably a feature and we may only want that for the active
    bookmark. But the behavior is kept compatible with the old version for now.
    """
    if not oldtopmost or not newtopmost:
        return
    oldbmarks = repo.nodebookmarks(oldtopmost)
    if oldbmarks:
        with repo.lock(), repo.transaction(b'histedit') as tr:
            marks = repo._bookmarks
            changes = []
            for name in oldbmarks:
                changes.append((name, newtopmost))
            marks.applychanges(repo, tr, changes)


def cleanupnode(ui, repo, nodes, nobackup=False):
    """strip a group of nodes from the repository

    The set of nodes to strip may contain unknown nodes."""
    with repo.lock():
        # do not let filtering get in the way of the cleanse
        # we should probably get rid of obsolescence markers created during
        # the histedit, but we currently do not have such information.
        repo = repo.unfiltered()
        # Find all nodes that need to be stripped
        # (we use %lr instead of %ln to silently ignore unknown items)
        has_node = repo.changelog.index.has_node
        nodes = sorted(n for n in nodes if has_node(n))
        roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
        if roots:
            backup = not nobackup
            repair.strip(ui, repo, roots, backup=backup)


def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
    if isinstance(nodelist, bytes):
        nodelist = [nodelist]
    state = histeditstate(repo)
    if state.inprogress():
        state.read()
        histedit_nodes = {
            action.node for action in state.actions if action.node
        }
        common_nodes = histedit_nodes & set(nodelist)
        if common_nodes:
            raise error.Abort(
                _(b"histedit in progress, can't strip %s")
                % b', '.join(short(x) for x in common_nodes)
            )
    return orig(ui, repo, nodelist, *args, **kwargs)


extensions.wrapfunction(repair, 'strip', stripwrapper)


def summaryhook(ui, repo):
    state = histeditstate(repo)
    if not state.inprogress():
        return
    state.read()
    if state.actions:
        # i18n: column positioning for "hg summary"
        ui.write(
            _(b'hist: %s (histedit --continue)\n')
            % (
                ui.label(_(b'%d remaining'), b'histedit.remaining')
                % len(state.actions)
            )
        )
2675
2675
2676
2676
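# Register the summary hook and describe the unfinished histedit state so
# that generic commands know how to continue or abort it.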
def extsetup(ui):
    cmdutil.summaryhooks.add(b'histedit', summaryhook)
    statemod.addunfinished(
        b'histedit',
        fname=b'histedit-state',
        allowcommit=True,
        continueflag=True,
        abortfunc=hgaborthistedit,
    )
@@ -1,1940 +1,1945
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''Overridden Mercurial commands and functions for the largefiles extension'''

import contextlib
import copy
import os
from typing import (
    Optional,
)

from mercurial.i18n import _

from mercurial.pycompat import open

from mercurial.hgweb import webcommands

from mercurial import (
    archival,
    cmdutil,
    copies as copiesmod,
    dirstate,
    error,
    exchange,
    extensions,
    exthelper,
    filemerge,
    hg,
    logcmdutil,
    match as matchmod,
    merge,
    mergestate as mergestatemod,
    pathutil,
    pycompat,
    scmutil,
    smartset,
    subrepo,
    url as urlmod,
    util,
)

from mercurial.upgrade_utils import (
    actions as upgrade_actions,
)

from mercurial.utils import urlutil

from . import (
    lfcommands,
    lfutil,
    storefactory,
)

ACTION_ADD = mergestatemod.ACTION_ADD
ACTION_DELETED_CHANGED = mergestatemod.ACTION_DELETED_CHANGED
ACTION_GET = mergestatemod.ACTION_GET
ACTION_KEEP = mergestatemod.ACTION_KEEP
ACTION_REMOVE = mergestatemod.ACTION_REMOVE

eh = exthelper.exthelper()

lfstatus = lfutil.lfstatus

MERGE_ACTION_LARGEFILE_MARK_REMOVED = mergestatemod.MergeAction('lfmr')

# -- Utility functions: commonly/repeatedly needed functionality ---------------


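# A "standin" is a small placeholder file under .hglf/ whose content is the
# hash of the real largefile; Mercurial itself only tracks the standins.
# The helpers below split a matcher into its largefile and normal-file halves.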
def composelargefilematcher(match, manifest):
    """create a matcher that matches only the largefiles in the original
    matcher"""
    m = copy.copy(match)
    m._was_tampered_with = True
    lfile = lambda f: lfutil.standin(f) in manifest
    m._files = [lf for lf in m._files if lfile(lf)]
    m._fileset = set(m._files)
    m.always = lambda: False
    origmatchfn = m.matchfn
    m.matchfn = lambda f: lfile(f) and origmatchfn(f)
    return m


def composenormalfilematcher(match, manifest, exclude=None):
    excluded = set()
    if exclude is not None:
        excluded.update(exclude)

    m = copy.copy(match)
    m._was_tampered_with = True
    notlfile = lambda f: not (
        lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
    )
    m._files = [lf for lf in m._files if notlfile(lf)]
    m._fileset = set(m._files)
    m.always = lambda: False
    origmatchfn = m.matchfn
    m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
    return m


def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
    large = opts.get('large')
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.get('lfsize')
    )

    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, b'patterns')
        if lfpats:
            lfmatcher = matchmod.match(repo.root, b'', list(lfpats))

    lfnames = []
    m = matcher

    wctx = repo[None]
    for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            # In case the file was removed previously, but not committed
            # (issue3507)
            if not repo.wvfs.exists(f):
                continue

            abovemin = (
                lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
            )
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))

    bad = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    with repo.wlock():
        if not opts.get('dry_run'):
            standins = []
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                lfutil.writestandin(
                    repo,
                    standinname,
                    hash=b'',
                    executable=lfutil.getexecutable(repo.wjoin(f)),
                )
                standins.append(standinname)
                lfdirstate.set_tracked(f)
            lfdirstate.write(repo.currenttransaction())
            bad += [
                lfutil.splitstandin(f)
                for f in repo[None].add(standins)
                if f in m.files()
            ]

        added = [f for f in lfnames if f not in bad]
    return added, bad


def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
    after = opts.get('after')
    m = composelargefilematcher(matcher, repo[None].manifest())
    with lfstatus(repo):
        s = repo.status(match=m, clean=not isaddremove)
    manifest = repo[None].manifest()
    modified, added, deleted, clean = [
        [f for f in list if lfutil.standin(f) in manifest]
        for list in (s.modified, s.added, s.deleted, s.clean)
    ]

    def warn(files, msg):
        for f in files:
            ui.warn(msg % uipathfn(f))
        return int(len(files) > 0)

    if after:
        remove = deleted
        result = warn(
            modified + added + clean, _(b'not removing %s: file still exists\n')
        )
    else:
        remove = deleted + clean
        result = warn(
            modified,
            _(
                b'not removing %s: file is modified (use -f'
                b' to force removal)\n'
            ),
        )
        result = (
            warn(
                added,
                _(
                    b'not removing %s: file has been marked for add'
                    b' (use forget to undo)\n'
                ),
            )
            or result
        )

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in sorted(remove):
            if ui.verbose or not m.exact(f):
                ui.status(_(b'removing %s\n') % uipathfn(f))

            if not dryrun:
                if not after:
                    repo.wvfs.unlinkpath(f, ignoremissing=True)

        if dryrun:
            return result

        remove = [lfutil.standin(f) for f in remove]
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not isaddremove:
            for f in remove:
                repo.wvfs.unlinkpath(f, ignoremissing=True)
            repo[None].forget(remove)

        for f in remove:
            lfdirstate.set_untracked(lfutil.splitstandin(f))

        lfdirstate.write(repo.currenttransaction())

    return result


# For overriding mercurial.hgweb.webcommands so that largefiles will
# appear at their right place in the manifests.
@eh.wrapfunction(webcommands, 'decodepath')
def decodepath(orig, path):
    return lfutil.splitstandin(path) or path


# -- Wrappers: modify existing commands --------------------------------


@eh.wrapcommand(
    b'add',
    opts=[
        (b'', b'large', None, _(b'add as largefile')),
        (b'', b'normal', None, _(b'add as normal file')),
        (
            b'',
            b'lfsize',
            b'',
            _(
                b'add all files above this size (in megabytes) '
                b'as largefiles (default: 10)'
            ),
        ),
    ],
)
def overrideadd(orig, ui, repo, *pats, **opts):
    if opts.get('normal') and opts.get('large'):
        raise error.Abort(_(b'--normal cannot be used with --large'))
    return orig(ui, repo, *pats, **opts)


@eh.wrapfunction(cmdutil, 'add')
def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
    # The --normal flag short circuits this override
    if opts.get('normal'):
        return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)

    ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
    normalmatcher = composenormalfilematcher(
        matcher, repo[None].manifest(), ladded
    )
    bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)

    bad.extend(f for f in lbad)
    return bad


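# Remove normal files with the wrapped implementation first, then remove the
# matching largefiles; a failure reported by either side fails the command.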
@eh.wrapfunction(cmdutil, 'remove')
def cmdutilremove(
    orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
):
    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
    result = orig(
        ui,
        repo,
        normalmatcher,
        prefix,
        uipathfn,
        after,
        force,
        subrepos,
        dryrun,
    )
    return (
        removelargefiles(
            ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
        )
        or result
    )


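# Mirror the dirstate's context managers onto the largefiles dirstate so
# that both dirstates enter and leave their change/status phases together.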
@eh.wrapfunction(dirstate.dirstate, '_changing')
@contextlib.contextmanager
def _changing(orig, self, repo, change_type):
    pre = sub_dirstate = getattr(self, '_sub_dirstate', None)
    try:
        lfd = getattr(self, '_large_file_dirstate', False)
        if sub_dirstate is None and not lfd:
            sub_dirstate = lfutil.openlfdirstate(repo.ui, repo)
            self._sub_dirstate = sub_dirstate
        if not lfd:
            assert self._sub_dirstate is not None
        with orig(self, repo, change_type):
            if sub_dirstate is None:
                yield
            else:
                with sub_dirstate._changing(repo, change_type):
                    yield
    finally:
        self._sub_dirstate = pre


@eh.wrapfunction(dirstate.dirstate, 'running_status')
@contextlib.contextmanager
def running_status(orig, self, repo):
    pre = sub_dirstate = getattr(self, '_sub_dirstate', None)
    try:
        lfd = getattr(self, '_large_file_dirstate', False)
        if sub_dirstate is None and not lfd:
            sub_dirstate = lfutil.openlfdirstate(repo.ui, repo)
            self._sub_dirstate = sub_dirstate
        if not lfd:
            assert self._sub_dirstate is not None
        with orig(self, repo):
            if sub_dirstate is None:
                yield
            else:
                with sub_dirstate.running_status(repo):
                    yield
    finally:
        self._sub_dirstate = pre


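# Run status and dirtiness checks with largefile reporting enabled, so the
# user sees changes to the largefiles themselves rather than to standins.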
@eh.wrapfunction(subrepo.hgsubrepo, 'status')
def overridestatusfn(orig, repo, rev2, **opts):
    with lfstatus(repo._repo):
        return orig(repo, rev2, **opts)


@eh.wrapcommand(b'status')
def overridestatus(orig, ui, repo, *pats, **opts):
    with lfstatus(repo):
        return orig(ui, repo, *pats, **opts)


@eh.wrapfunction(subrepo.hgsubrepo, 'dirty')
def overridedirty(orig, repo, ignoreupdate=False, missing=False):
    with lfstatus(repo._repo):
        return orig(repo, ignoreupdate=ignoreupdate, missing=missing)


@eh.wrapcommand(b'log')
def overridelog(orig, ui, repo, *pats, **opts):
    def overridematchandpats(
        orig,
        ctx,
        pats=(),
        opts=None,
        globbed=False,
        default=b'relpath',
        badfn=None,
    ):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        if opts is None:
            opts = {}
        matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn)
        m, p = copy.copy(matchandpats)

        if m.always():
            # We want to match everything anyway, so there's no benefit trying
            # to add standins.
            return matchandpats

        pats = set(p)

        def fixpats(pat, tostandin=lfutil.standin):
            if pat.startswith(b'set:'):
                return pat

            kindpat = matchmod._patsplit(pat, None)

            if kindpat[0] is not None:
                return kindpat[0] + b':' + tostandin(kindpat[1])
            return tostandin(kindpat[1])

        cwd = repo.getcwd()
        if cwd:
            hglf = lfutil.shortname
            back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])

            def tostandin(f):
                # The file may already be a standin, so truncate the back
                # prefix and test before mangling it. This avoids turning
                # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
                if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
                    return f

                # An absolute path is from outside the repo, so truncate the
                # path to the root before building the standin. Otherwise cwd
                # is somewhere in the repo, relative to root, and needs to be
                # prepended before building the standin.
                if os.path.isabs(cwd):
                    f = f[len(back) :]
                else:
                    f = cwd + b'/' + f
                return back + lfutil.standin(f)

        else:

            def tostandin(f):
                if lfutil.isstandin(f):
                    return f
                return lfutil.standin(f)

        pats.update(fixpats(f, tostandin) for f in p)

        m._was_tampered_with = True

        for i in range(0, len(m._files)):
            # Don't add '.hglf' to m.files, since that is already covered by '.'
            if m._files[i] == b'.':
                continue
            standin = lfutil.standin(m._files[i])
            # If the "standin" is a directory, append instead of replace to
            # support naming a directory on the command line with only
            # largefiles. The original directory is kept to support normal
            # files.
            if standin in ctx:
                m._files[i] = standin
            elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
                m._files.append(standin)

        m._fileset = set(m._files)
        m.always = lambda: False
        origmatchfn = m.matchfn

        def lfmatchfn(f):
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r

        m.matchfn = lfmatchfn

        ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
        return m, pats

    # For hg log --patch, the match object is used in two different senses:
    # (1) to determine what revisions should be printed out, and
    # (2) to determine what files to print out diffs for.
    # The magic matchandpats override should be used for case (1) but not for
    # case (2).
    oldmatchandpats = scmutil.matchandpats

    def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
        wctx = repo[None]
        match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
        return lambda ctx: match

    wrappedmatchandpats = extensions.wrappedfunction(
        scmutil, 'matchandpats', overridematchandpats
    )
    wrappedmakefilematcher = extensions.wrappedfunction(
        logcmdutil, '_makenofollowfilematcher', overridemakefilematcher
    )
    with wrappedmatchandpats, wrappedmakefilematcher:
        return orig(ui, repo, *pats, **opts)


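# Add largefile-specific flags to 'hg verify'; when any of them is given,
# additionally check the largefile store after the normal verification.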
@eh.wrapcommand(
    b'verify',
    opts=[
        (
            b'',
            b'large',
            None,
            _(b'verify that all largefiles in current revision exist'),
        ),
        (
            b'',
            b'lfa',
            None,
            _(b'verify largefiles in all revisions, not just current'),
        ),
        (
            b'',
            b'lfc',
            None,
            _(b'verify local largefile contents, not just existence'),
        ),
    ],
)
def overrideverify(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    all = opts.pop('lfa', False)
    contents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if large or all or contents:
        result = result or lfcommands.verifylfiles(ui, repo, all, contents)
    return result


@eh.wrapcommand(
    b'debugstate',
    opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
)
def overridedebugstate(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    if large:

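        # debugstate only reads repo.dirstate, so a minimal stand-in object
        # exposing the largefiles dirstate is all that is needed here.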
        class fakerepo:
            dirstate = lfutil.openlfdirstate(ui, repo)

        orig(ui, fakerepo, *pats, **opts)
    else:
        orig(ui, repo, *pats, **opts)


# Before starting the manifest merge, merge.updates will call
# _checkunknownfile to check if there are any files in the merged-in
# changeset that collide with unknown files in the working copy.
#
# The largefiles are seen as unknown, so this prevents us from merging
# in a file 'foo' if we already have a largefile with the same name.
#
# The overridden function filters the unknown files by removing any
# largefiles. This makes the merge proceed and we can then handle this
# case further in the overridden calculateupdates function below.
@eh.wrapfunction(merge, '_checkunknownfile')
def overridecheckunknownfile(
    origfn, dirstate, wvfs, dircache, wctx, mctx, f, f2=None
):
    if lfutil.standin(dirstate.normalize(f)) in wctx:
        return False
    return origfn(dirstate, wvfs, dircache, wctx, mctx, f, f2)


# The manifest merge handles conflicts on the manifest level. We want
# to handle changes in largefile-ness of files at this level too.
#
# The strategy is to run the original calculateupdates and then process
# the action list it outputs. There are two cases we need to deal with:
#
# 1. Normal file in p1, largefile in p2. Here the largefile is
#    detected via its standin file, which will enter the working copy
#    with a "get" action. It is not "merge" since the standin is all
#    Mercurial is concerned with at this level -- the link to the
#    existing normal file is not relevant here.
#
# 2. Largefile in p1, normal file in p2. Here we get a "merge" action
#    since the largefile will be present in the working copy and
#    different from the normal file in p2. Mercurial therefore
#    triggers a merge action.
#
# In both cases, we prompt the user and emit new actions to either
# remove the standin (if the normal file was kept) or to remove the
# normal file and get the standin (if the largefile was kept). The
# default prompt answer is to use the largefile version since it was
# presumably changed on purpose.
#
# Finally, the merge.applyupdates function will then take care of
# writing the files into the working copy and lfcommands.updatelfiles
# will update the largefiles.
@eh.wrapfunction(merge, 'calculateupdates')
def overridecalculateupdates(
    origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
):
    overwrite = force and not branchmerge
    mresult = origfn(
        repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
    )

    if overwrite:
        return mresult

    # Collect the largefiles affected by this merge.
    lfiles = set()
    for f in mresult.files():
        splitstandin = lfutil.splitstandin(f)
        if splitstandin is not None and splitstandin in p1:
            lfiles.add(splitstandin)
        elif lfutil.standin(f) in p1:
            lfiles.add(f)

    for lfile in sorted(lfiles):
        standin = lfutil.standin(lfile)
        (lm, largs, lmsg) = mresult.getfile(lfile, (None, None, None))
        (sm, sargs, smsg) = mresult.getfile(standin, (None, None, None))

        if sm in (ACTION_GET, ACTION_DELETED_CHANGED) and lm != ACTION_REMOVE:
            if sm == ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = sargs
                sargs = (p2[f2].flags(), False)
            # Case 1: normal file in the working copy, largefile in
            # the second parent
            usermsg = (
                _(
                    b'remote turned local normal file %s into a largefile\n'
                    b'use (l)argefile or keep (n)ormal file?'
                    b'$$ &Largefile $$ &Normal file'
                )
                % lfile
            )
            if repo.ui.promptchoice(usermsg, 0) == 0:  # pick remote largefile
                mresult.addfile(
                    lfile, ACTION_REMOVE, None, b'replaced by standin'
                )
                mresult.addfile(standin, ACTION_GET, sargs, b'replaces standin')
            else:  # keep local normal file
                mresult.addfile(lfile, ACTION_KEEP, None, b'replaces standin')
                if branchmerge:
                    mresult.addfile(
                        standin,
                        ACTION_KEEP,
                        None,
                        b'replaced by non-standin',
                    )
                else:
                    mresult.addfile(
                        standin,
                        ACTION_REMOVE,
                        None,
                        b'replaced by non-standin',
                    )
        if lm in (ACTION_GET, ACTION_DELETED_CHANGED) and sm != ACTION_REMOVE:
            if lm == ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = largs
                largs = (p2[f2].flags(), False)
            # Case 2: largefile in the working copy, normal file in
            # the second parent
            usermsg = (
                _(
                    b'remote turned local largefile %s into a normal file\n'
                    b'keep (l)argefile or use (n)ormal file?'
                    b'$$ &Largefile $$ &Normal file'
                )
                % lfile
            )
            if repo.ui.promptchoice(usermsg, 0) == 0:  # keep local largefile
                if branchmerge:
                    # largefile can be restored from standin safely
                    mresult.addfile(
                        lfile,
                        ACTION_KEEP,
                        None,
                        b'replaced by standin',
                    )
                    mresult.addfile(
                        standin, ACTION_KEEP, None, b'replaces standin'
                    )
                else:
                    # "lfile" should be marked as "removed" without
                    # removal of itself
                    mresult.addfile(
                        lfile,
                        MERGE_ACTION_LARGEFILE_MARK_REMOVED,
                        None,
                        b'forget non-standin largefile',
                    )

                    # linear-merge should treat this largefile as 're-added'
                    mresult.addfile(standin, ACTION_ADD, None, b'keep standin')
            else:  # pick remote normal file
                mresult.addfile(lfile, ACTION_GET, largs, b'replaces standin')
                mresult.addfile(
                    standin,
                    ACTION_REMOVE,
                    None,
                    b'replaced by non-standin',
                )

    return mresult


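# Record the dirstate updates for files carrying the largefile-specific
# 'lfmr' merge action before the default merge bookkeeping runs.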
@eh.wrapfunction(mergestatemod, 'recordupdates')
def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
    if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        for lfile, args, msg in actions[MERGE_ACTION_LARGEFILE_MARK_REMOVED]:
            # this should be executed before 'orig', to execute 'remove'
            # before all other actions
            repo.dirstate.update_file(lfile, p1_tracked=True, wc_tracked=False)
            # make sure lfile doesn't get synclfdirstate'd as normal
            lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=True)

    return orig(repo, actions, branchmerge, getfiledata)


# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
@eh.wrapfunction(filemerge, 'filemerge')
def overridefilemerge(
    origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
):
    if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
        return origfn(repo, wctx, mynode, orig, fcd, fco, fca, labels=labels)

    ahash = lfutil.readasstandin(fca).lower()
    dhash = lfutil.readasstandin(fcd).lower()
    ohash = lfutil.readasstandin(fco).lower()
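    # The standins hold content hashes: fca is the ancestor, fcd the local
    # version and fco the other version.  If the other side is unchanged (or
    # matches the local change) keep local; if only the other side changed,
    # take it silently; otherwise ask the user.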
    if (
        ohash != ahash
        and ohash != dhash
        and (
            dhash == ahash
            or repo.ui.promptchoice(
                _(
                    b'largefile %s has a merge conflict\nancestor was %s\n'
                    b'you can keep (l)ocal %s or take (o)ther %s.\n'
                    b'what do you want to do?'
                    b'$$ &Local $$ &Other'
                )
                % (lfutil.splitstandin(orig), ahash, dhash, ohash),
                0,
            )
            == 1
        )
    ):
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0, False


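# Report copies of largefiles under their visible names instead of their
# .hglf standin paths.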
@eh.wrapfunction(copiesmod, 'pathcopies')
def copiespathcopies(orig, ctx1, ctx2, match=None):
    copies = orig(ctx1, ctx2, match=match)
    updated = {}

    for k, v in copies.items():
        updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v

    return updated


# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile; in that function it
# checks whether the destination largefile already exists. It also
# keeps a list of copied files so that the largefiles can be copied
# and the dirstate updated.
@eh.wrapfunction(cmdutil, 'copy')
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    manifest = repo[None].manifest()

    def normalfilesmatchfn(
        orig,
        ctx,
        pats=(),
        opts=None,
        globbed=False,
        default=b'relpath',
        badfn=None,
    ):
        if opts is None:
            opts = {}
        match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
        return composenormalfilematcher(match, manifest)

    with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn):
        try:
            result = orig(ui, repo, pats, opts, rename)
        except error.Abort as e:
            if e.message != _(b'no files to copy'):
                raise e
            else:
                nonormalfiles = True
                result = 0

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    def makestandin(relpath):
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return repo.wvfs.join(lfutil.standin(path))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))

    # When we call orig below it creates the standins but we don't add
    # them to the dir state until later so lock during that time.
    wlock = repo.wlock()

    try:
        manifest = repo[None].manifest()

        def overridematch(
            orig,
            ctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            if opts is None:
                opts = {}
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if matchmod.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, b''))
                else:
                    newpats.append(pat)
            match = orig(ctx, newpats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            m._was_tampered_with = True
            lfile = lambda f: lfutil.standin(f) in manifest
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                lfile = lfutil.splitstandin(f)
                return (
                    lfile is not None
                    and (f in manifest)
                    and origmatchfn(lfile)
                    or None
                )

            m.matchfn = matchfn
            return m

        listpats = []
        for pat in pats:
            if matchmod.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        copiedfiles = []

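        # Refuse to overwrite an existing destination largefile unless
        # --force was given, and remember each copied standin so the
        # largefiles and their dirstate entries can be updated afterwards.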
886 def overridecopyfile(orig, src, dest, *args, **kwargs):
888 def overridecopyfile(orig, src, dest, *args, **kwargs):
887 if lfutil.shortname in src and dest.startswith(
889 if lfutil.shortname in src and dest.startswith(
888 repo.wjoin(lfutil.shortname)
890 repo.wjoin(lfutil.shortname)
889 ):
891 ):
890 destlfile = dest.replace(lfutil.shortname, b'')
892 destlfile = dest.replace(lfutil.shortname, b'')
891 if not opts[b'force'] and os.path.exists(destlfile):
893 if not opts[b'force'] and os.path.exists(destlfile):
892 raise IOError(
894 raise IOError(
893 b'', _(b'destination largefile already exists')
895 b'', _(b'destination largefile already exists')
894 )
896 )
895 copiedfiles.append((src, dest))
897 copiedfiles.append((src, dest))
896 orig(src, dest, *args, **kwargs)
898 orig(src, dest, *args, **kwargs)
897
899
898 with extensions.wrappedfunction(util, 'copyfile', overridecopyfile):
900 with extensions.wrappedfunction(util, 'copyfile', overridecopyfile):
899 with extensions.wrappedfunction(scmutil, 'match', overridematch):
901 with extensions.wrappedfunction(scmutil, 'match', overridematch):
900 result += orig(ui, repo, listpats, opts, rename)
902 result += orig(ui, repo, listpats, opts, rename)
901
903
902 lfdirstate = lfutil.openlfdirstate(ui, repo)
904 lfdirstate = lfutil.openlfdirstate(ui, repo)
903 for src, dest in copiedfiles:
905 for src, dest in copiedfiles:
904 if lfutil.shortname in src and dest.startswith(
906 if lfutil.shortname in src and dest.startswith(
905 repo.wjoin(lfutil.shortname)
907 repo.wjoin(lfutil.shortname)
906 ):
908 ):
907 srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
909 srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
908 destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
910 destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
909 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
911 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
910 if not os.path.isdir(destlfiledir):
912 if not os.path.isdir(destlfiledir):
911 os.makedirs(destlfiledir)
913 os.makedirs(destlfiledir)
912 if rename:
914 if rename:
913 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
915 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
914
916
915 # The file is gone, but this deletes any empty parent
917 # The file is gone, but this deletes any empty parent
916 # directories as a side-effect.
918 # directories as a side-effect.
917 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
919 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
918 lfdirstate.set_untracked(srclfile)
920 lfdirstate.set_untracked(srclfile)
919 else:
921 else:
920 util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))
922 util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))
921
923
922 lfdirstate.set_tracked(destlfile)
924 lfdirstate.set_tracked(destlfile)
923 lfdirstate.write(repo.currenttransaction())
925 lfdirstate.write(repo.currenttransaction())
924 except error.Abort as e:
926 except error.Abort as e:
925 if e.message != _(b'no files to copy'):
927 if e.message != _(b'no files to copy'):
926 raise e
928 raise e
927 else:
929 else:
928 nolfiles = True
930 nolfiles = True
929 finally:
931 finally:
930 wlock.release()
932 wlock.release()
931
933
932 if nolfiles and nonormalfiles:
934 if nolfiles and nonormalfiles:
933 raise error.Abort(_(b'no files to copy'))
935 raise error.Abort(_(b'no files to copy'))
934
936
935 return result
937 return result
936
938
937
939
# When the user calls revert, we have to be careful not to revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins, then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins, update the largefiles.
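#
# Roughly, the flow implemented below is (a sketch, not extra behavior):
#
#   1. lfutil.updatestandin(...)      sync standins with the working copy
#   2. orig(ui, repo, ctx, ...)       revert standins instead of largefiles
#   3. lfcommands.updatelfiles(...)   sync largefiles back from the standins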
@eh.wrapfunction(cmdutil, 'revert')
def overriderevert(orig, ui, repo, ctx, *pats, **opts):
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    with repo.wlock(), repo.dirstate.running_status(repo):
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write(repo.currenttransaction())
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
        for lfile in s.deleted:
            fstandin = lfutil.standin(lfile)
            if repo.wvfs.exists(fstandin):
                repo.wvfs.unlink(fstandin)

        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(
            orig,
            mctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            if opts is None:
                opts = {}
            match = orig(mctx, pats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            m._was_tampered_with = True

            # revert supports recursing into subrepos, and though largefiles
            # currently doesn't work correctly in that case, this match is
            # called, so the lfdirstate above may not be the correct one for
            # this invocation of match.
            lfdirstate = lfutil.openlfdirstate(
                mctx.repo().ui, mctx.repo(), False
            )

            wctx = repo[None]
            matchfiles = []
            for f in m._files:
                standin = lfutil.standin(f)
                if standin in ctx or standin in mctx:
                    matchfiles.append(standin)
                elif standin in wctx or lfdirstate.get_entry(f).removed:
                    continue
                else:
                    matchfiles.append(f)
            m._files = matchfiles
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                lfile = lfutil.splitstandin(f)
                if lfile is not None:
                    return origmatchfn(lfile) and (f in ctx or f in mctx)
                return origmatchfn(f)

            m.matchfn = matchfn
            return m

        with extensions.wrappedfunction(scmutil, 'match', overridematch):
            orig(ui, repo, ctx, *pats, **opts)

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(
            ui, repo, filelist, printmessage=False, normallookup=True
        )


# after pulling changesets, we need to take some extra care to get the
# corresponding largefiles fetched into the local cache
@eh.wrapcommand(
    b'pull',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all pulled versions of largefiles (DEPRECATED)'),
        ),
        (
            b'',
            b'lfrev',
            [],
            _(b'download largefiles for these revisions'),
            _(b'REV'),
        ),
    ],
)
def overridepull(orig, ui, repo, source=None, **opts):
    revsprepull = len(repo)
    if not source:
        source = b'default'
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        lfrevs.append(b'pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull  # for pulled() revset expression
        try:
            for rev in logcmdutil.revrange(repo, lfrevs):
                ui.note(_(b'pulling largefiles for revision %d\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            del repo.firstpulled
        ui.status(_(b"%d largefiles cached\n") % numcached)
    return result


@eh.wrapcommand(
    b'push',
    opts=[
        (
            b'',
            b'lfrev',
            [],
            _(b'upload largefiles for these revisions'),
            _(b'REV'),
        )
    ],
)
def overridepush(orig, ui, repo, *args, **kwargs):
    """Override push command and store --lfrev parameters in opargs"""
    lfrevs = kwargs.pop('lfrev', None)
    if lfrevs:
        opargs = kwargs.setdefault('opargs', {})
        opargs[b'lfrevs'] = logcmdutil.revrange(repo, lfrevs)
    return orig(ui, repo, *args, **kwargs)
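

# A sketch of the resulting flow: `hg push --lfrev 42` roughly becomes
# orig(..., opargs={b'lfrevs': revrange(repo, [b'42'])}), and the wrapped
# exchange.pushoperation below exposes those revisions as pushop.lfrevs,
# presumably consumed later to decide which largefiles to upload.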


@eh.wrapfunction(exchange, 'pushoperation')
def exchangepushoperation(orig, *args, **kwargs):
    """Override pushoperation constructor and store lfrevs parameter"""
    lfrevs = kwargs.pop('lfrevs', None)
    pushop = orig(*args, **kwargs)
    pushop.lfrevs = lfrevs
    return pushop


@eh.revsetpredicate(b'pulled()')
def pulledrevsetsymbol(repo, subset, x):
    """Changesets that have just been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull --lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull --lfrev "head(pulled()) and not closed()"

    """

    try:
        firstpulled = repo.firstpulled
    except AttributeError:
        raise error.Abort(_(b"pulled() only available in --lfrev"))
    return smartset.baseset([r for r in subset if r >= firstpulled])
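

# For example: if the repository had 10 revisions before the pull,
# firstpulled == 10 and "pulled()" selects revisions 10..tip, i.e. exactly
# the changesets added by this pull.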


@eh.wrapcommand(
    b'clone',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all versions of all largefiles'),
        )
    ],
)
def overrideclone(orig, ui, source, dest=None, **opts):
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    if opts.get('all_largefiles') and urlutil.url(d).scheme not in (
        b'file',
        None,
    ):
        raise error.Abort(
            _(b'--all-largefiles is incompatible with non-local destination %s')
            % d
        )

    return orig(ui, source, dest, **opts)
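

# For illustration, assuming urlutil.url's usual parsing:
# urlutil.url(b'ssh://host/repo').scheme is b'ssh' and is rejected, while
# a plain path like b'/tmp/repo' parses with scheme None and
# b'file:///tmp/repo' with scheme b'file', so both local forms are accepted.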


@eh.wrapfunction(hg, 'clone')
def hgclone(orig, ui, opts, *args, **kwargs):
    result = orig(ui, opts, *args, **kwargs)

    if result is not None:
        sourcerepo, destrepo = result
        repo = destrepo.local()

        # When cloning to a remote repo (like through SSH), no repo is available
        # from the peer. Therefore the largefiles can't be downloaded and the
        # hgrc can't be updated.
        if not repo:
            return result

        # Caching is implicitly limited to 'rev' option, since the dest repo was
        # truncated at that point. The user may expect a download count with
        # this option, so attempt the download whether or not this is a
        # largefile repo.
        if opts.get(b'all_largefiles'):
            success, missing = lfcommands.downloadlfiles(ui, repo)

            if missing != 0:
                return None

    return result
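

# For instance, `hg clone --all-largefiles src dst` pre-populates dst's
# largefile cache; returning None above signals failure (a non-zero exit)
# to the clone command when some largefiles could not be downloaded.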


@eh.wrapcommand(b'rebase', extension=b'rebase')
def overriderebasecmd(orig, ui, repo, **opts):
    if not hasattr(repo, '_largefilesenabled'):
        return orig(ui, repo, **opts)

    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        with ui.configoverride(
            {(b'rebase', b'experimental.inmemory'): False}, b"largefiles"
        ):
            return orig(ui, repo, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()


@eh.extsetup
def overriderebase(ui):
    try:
        rebase = extensions.find(b'rebase')
    except KeyError:
        pass
    else:

        def _dorebase(orig, *args, **kwargs):
            kwargs['inmemory'] = False
            return orig(*args, **kwargs)

        extensions.wrapfunction(rebase, '_dorebase', _dorebase)
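

# Note: in-memory rebase is disabled twice on purpose: once via the config
# override in overriderebasecmd and once via the 'inmemory' kwarg here, so
# largefiles are always rebased on disk (mergeupdate below raises a
# ProgrammingError if an in-memory merge slips through anyway).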


@eh.wrapcommand(b'archive')
def overridearchivecmd(orig, ui, repo, dest, **opts):
    with lfstatus(repo.unfiltered()):
        return orig(ui, repo.unfiltered(), dest, **opts)


@eh.wrapfunction(webcommands, 'archive')
def hgwebarchive(orig, web):
    with lfstatus(web.repo):
        return orig(web)


@eh.wrapfunction(archival, 'archive')
def overridearchive(
    orig,
    repo,
    dest,
    node,
    kind,
    decode=True,
    match: Optional[matchmod.basematcher] = None,
    prefix=b'',
    mtime=None,
    subrepos=None,
):
    # For some reason setting repo.lfstatus in hgwebarchive only changes the
    # unfiltered repo's attr, so check that as well.
    if not repo.lfstatus and not repo.unfiltered().lfstatus:
        return orig(
            repo, dest, node, kind, decode, match, prefix, mtime, subrepos
        )

    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    if node is not None:
        lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise error.Abort(_(b"unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == b'files':
        if prefix:
            raise error.Abort(_(b'cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    if not match:
        match = scmutil.matchall(repo)

    def write(name, mode, islink, getdata):
        if not match(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool(b"ui", b"archivemeta"):
        write(
            b'.hg_archival.txt',
            0o644,
            False,
            lambda: archival.buildmetadata(ctx),
        )

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        lfile = lfutil.splitstandin(f)
        if lfile is not None:
            if node is not None:
                path = lfutil.findfile(repo, getdata().strip())

                if path is None:
                    raise error.Abort(
                        _(
                            b'largefile %s not found in repo store or system cache'
                        )
                        % lfile
                    )
            else:
                path = lfile

            f = lfile

            getdata = lambda: util.readfile(path)
        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
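        # The `and/or` expression above is the pre-ternary idiom for
        # 0o755 if b'x' in ff else 0o644 (safe because 0o755 is truthy);
        # b'l' in ff marks symlinks.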

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + b'/'

            # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
            # infer and possibly set lfstatus in hgsubrepoarchive. That would
            # allow only hgsubrepos to set this, instead of the current scheme
            # where the parent sets this for the child.
            with (
                hasattr(sub, '_repo')
                and lfstatus(sub._repo)
                or util.nullcontextmanager()
            ):
                sub.archive(archiver, subprefix, submatch)

    archiver.done()


@eh.wrapfunction(subrepo.hgsubrepo, 'archive')
def hgsubrepoarchive(
    orig, repo, archiver, prefix, match: matchmod.basematcher, decode=True
):
    lfenabled = hasattr(repo._repo, '_largefilesenabled')
    if not lfenabled or not repo._repo.lfstatus:
        return orig(repo, archiver, prefix, match, decode)

    repo._get(repo._state + (b'hg',))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    if ctx.node() is not None:
        lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile name,
        # so the normal matcher works here without the lfutil variants.
        if not match(f):
            return
        data = getdata()
        if decode:
            data = repo._repo.wwritedata(name, data)

        archiver.addfile(prefix + name, mode, islink, data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        lfile = lfutil.splitstandin(f)
        if lfile is not None:
            if ctx.node() is not None:
                path = lfutil.findfile(repo._repo, getdata().strip())

                if path is None:
                    raise error.Abort(
                        _(
                            b'largefile %s not found in repo store or system cache'
                        )
                        % lfile
                    )
            else:
                path = lfile

            f = lfile

            getdata = lambda: util.readfile(os.path.join(prefix, path))

        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.workingsub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = prefix + subpath + b'/'
        # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
        # infer and possibly set lfstatus at the top of this function. That
        # would allow only hgsubrepos to set this, instead of the current scheme
        # where the parent sets this for the child.
        with (
            hasattr(sub, '_repo')
            and lfstatus(sub._repo)
            or util.nullcontextmanager()
        ):
            sub.archive(archiver, subprefix, submatch, decode)


# If a largefile is modified, the change is not reflected in its
# standin until a commit. cmdutil.bailifchanged() raises an exception
# if the repo has uncommitted changes. Wrap it to also check if
# largefiles were changed. This is used by bisect, backout and fetch.
@eh.wrapfunction(cmdutil, 'bailifchanged')
def overridebailifchanged(orig, repo, *args, **kwargs):
    orig(repo, *args, **kwargs)
    with lfstatus(repo):
        s = repo.status()
    if s.modified or s.added or s.removed or s.deleted:
        raise error.Abort(_(b'uncommitted changes'))


@eh.wrapfunction(cmdutil, 'postcommitstatus')
def postcommitstatus(orig, repo, *args, **kwargs):
    with lfstatus(repo):
        return orig(repo, *args, **kwargs)


@eh.wrapfunction(cmdutil, 'forget')
def cmdutilforget(
    orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    normalmatcher = composenormalfilematcher(match, repo[None].manifest())
    bad, forgot = orig(
        ui,
        repo,
        normalmatcher,
        prefix,
        uipathfn,
        explicitonly,
        dryrun,
        interactive,
    )
    m = composelargefilematcher(match, repo[None].manifest())

    with lfstatus(repo):
        s = repo.status(match=m, clean=True)
    manifest = repo[None].manifest()
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    forget = [f for f in forget if lfutil.standin(f) in manifest]

    for f in forget:
        fstandin = lfutil.standin(f)
        if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
            ui.warn(
                _(b'not removing %s: file is already untracked\n') % uipathfn(f)
            )
            bad.append(f)

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_(b'removing %s\n') % uipathfn(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in forget:
            lfdirstate.set_untracked(f)
        lfdirstate.write(repo.currenttransaction())
        standins = [lfutil.standin(f) for f in forget]
        for f in standins:
            repo.wvfs.unlinkpath(f, ignoremissing=True)
        rejected = repo[None].forget(standins)

    bad.extend(f for f in rejected if f in m.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot


def _getoutgoings(repo, other, missing, addfunc):
    """get pairs of filename and largefile hash in outgoing revisions
    in 'missing'.

    largefiles already existing on the 'other' repository are ignored.

    'addfunc' is invoked with each unique pair of filename and
    largefile hash value.
    """
    knowns = set()
    lfhashes = set()

    def dedup(fn, lfhash):
        k = (fn, lfhash)
        if k not in knowns:
            knowns.add(k)
            lfhashes.add(lfhash)

    lfutil.getlfilestoupload(repo, missing, dedup)
    if lfhashes:
        lfexists = storefactory.openstore(repo, other).exists(lfhashes)
        for fn, lfhash in knowns:
            if not lfexists[lfhash]:  # lfhash doesn't exist on "other"
                addfunc(fn, lfhash)
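

# Sketch of the effect: if several outgoing revisions carry the same
# (filename, hash) pair, dedup() records it only once, all hashes are checked
# against 'other' in one exists() batch, and addfunc() sees only pairs whose
# hash is missing there.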


def outgoinghook(ui, repo, other, opts, missing):
    if opts.pop(b'large', None):
        lfhashes = set()
        if ui.debugflag:
            toupload = {}

            def addfunc(fn, lfhash):
                if fn not in toupload:
                    toupload[fn] = []  # pytype: disable=unsupported-operands
                toupload[fn].append(lfhash)
                lfhashes.add(lfhash)

            def showhashes(fn):
                for lfhash in sorted(toupload[fn]):
                    ui.debug(b'    %s\n' % lfhash)

        else:
            toupload = set()

            def addfunc(fn, lfhash):
                toupload.add(fn)
                lfhashes.add(lfhash)

            def showhashes(fn):
                pass

        _getoutgoings(repo, other, missing, addfunc)

        if not toupload:
            ui.status(_(b'largefiles: no files to upload\n'))
        else:
            ui.status(
                _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
            )
            for file in sorted(toupload):
                ui.status(lfutil.splitstandin(file) + b'\n')
                showhashes(file)
            ui.status(b'\n')


@eh.wrapcommand(
    b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
)
def _outgoingcmd(orig, *args, **kwargs):
    # Nothing to do here other than add the extra help option; the hook above
    # processes it.
    return orig(*args, **kwargs)


def summaryremotehook(ui, repo, opts, changes):
    largeopt = opts.get(b'large', False)
    if changes is None:
        if largeopt:
            return (False, True)  # only outgoing check is needed
        else:
            return (False, False)
    elif largeopt:
        url, branch, peer, outgoing = changes[1]
        if peer is None:
            # i18n: column positioning for "hg summary"
            ui.status(_(b'largefiles: (no remote repo)\n'))
            return

        toupload = set()
        lfhashes = set()

        def addfunc(fn, lfhash):
            toupload.add(fn)
            lfhashes.add(lfhash)

        _getoutgoings(repo, peer, outgoing.missing, addfunc)

        if not toupload:
            # i18n: column positioning for "hg summary"
            ui.status(_(b'largefiles: (no files to upload)\n'))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(
                _(b'largefiles: %d entities for %d files to upload\n')
                % (len(lfhashes), len(toupload))
            )
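            # For instance, with 2 files and 3 distinct hashes the line reads:
            #   largefiles: 3 entities for 2 files to upload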


@eh.wrapcommand(
    b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
)
def overridesummary(orig, ui, repo, *pats, **opts):
    with lfstatus(repo):
        orig(ui, repo, *pats, **opts)


@eh.wrapfunction(scmutil, 'addremove')
def scmutiladdremove(
    orig,
    repo,
    matcher,
    prefix,
    uipathfn,
    opts=None,
    open_tr=None,
):
    if opts is None:
        opts = {}
    if not lfutil.islfilesrepo(repo):
        return orig(repo, matcher, prefix, uipathfn, opts, open_tr=open_tr)

    # open the transaction and changing_files context
    if open_tr is not None:
        open_tr()

    # Get the list of missing largefiles so we can remove them
    with repo.dirstate.running_status(repo):
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        unsure, s, mtime_boundary = lfdirstate.status(
            matchmod.always(),
            subrepos=[],
            ignored=False,
            clean=False,
            unknown=False,
        )

    # Call into the normal remove code, but we want the removal of the standin
    # itself to be handled by the original addremove. Monkey patching here
    # makes sure we don't remove the standin in the largefiles code,
    # preventing a very confused state later.
    if s.deleted:
        m = copy.copy(matcher)
        m._was_tampered_with = True

        # The m._files and m._map attributes are not changed to the deleted list
        # because that affects the m.exact() test, which in turn governs whether
        # or not the file name is printed, and how. Simply limit the original
        # matches to those in the deleted status list.
        matchfn = m.matchfn
        m.matchfn = lambda f: f in s.deleted and matchfn(f)

        removelargefiles(
            repo.ui,
            repo,
            True,
            m,
            uipathfn,
            opts.get(b'dry_run'),
            **pycompat.strkwargs(opts)
        )
    # Call into the normal add code, and any files that *should* be added as
    # largefiles will be
    added, bad = addlargefiles(
        repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
    )
    # Now that we've handled largefiles, hand off to the original addremove
    # function to take care of the rest. Make sure it doesn't do anything with
    # largefiles by passing a matcher that will ignore them.
    matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)

    return orig(repo, matcher, prefix, uipathfn, opts, open_tr=open_tr)
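

# Net effect for `hg addremove` (a sketch): the code above removes missing
# largefiles and adds new ones itself, then the original addremove runs with
# a matcher that only sees normal files, so largefiles are never handled
# twice.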


# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
@eh.wrapcommand(b'purge')
def overridepurge(orig, ui, repo, *dirs, **opts):
    # XXX Monkey patching a repoview will not work. The assigned attribute will
    # be set on the unfiltered repo, but we will only lookup attributes in the
    # unfiltered repo if the lookup in the repoview object itself fails. As the
    # monkey patched method exists on the repoview class the lookup will not
    # fail. As a result, the original version will shadow the monkey patched
    # one, defeating the monkey patch.
    #
    # As a workaround we use an unfiltered repo here. We should do something
    # cleaner instead.
    repo = repo.unfiltered()
    oldstatus = repo.status

    def overridestatus(
        node1=b'.',
        node2=None,
        match=None,
        ignored=False,
        clean=False,
        unknown=False,
        listsubrepos=False,
    ):
        r = oldstatus(
            node1, node2, match, ignored, clean, unknown, listsubrepos
        )
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        unknown = [
            f for f in r.unknown if not lfdirstate.get_entry(f).any_tracked
        ]
        ignored = [
            f for f in r.ignored if not lfdirstate.get_entry(f).any_tracked
        ]
        return scmutil.status(
            r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
        )

    repo.status = overridestatus
    orig(ui, repo, *dirs, **opts)
    repo.status = oldstatus


@eh.wrapcommand(b'rollback')
def overriderollback(orig, ui, repo, **opts):
    with repo.wlock():
        before = repo.dirstate.parents()
        orphans = {
            f
            for f in repo.dirstate
            if lfutil.isstandin(f) and not repo.dirstate.get_entry(f).removed
        }
        result = orig(ui, repo, **opts)
        after = repo.dirstate.parents()
        if before == after:
            return result  # no need to restore standins

        pctx = repo[b'.']
        for f in repo.dirstate:
            if lfutil.isstandin(f):
                orphans.discard(f)
                if repo.dirstate.get_entry(f).removed:
                    repo.wvfs.unlinkpath(f, ignoremissing=True)
                elif f in pctx:
                    fctx = pctx[f]
                    repo.wwrite(f, fctx.data(), fctx.flags())
                else:
                    # content of standin is not so important in 'a',
                    # 'm' or 'n' (coming from the 2nd parent) cases
                    lfutil.writestandin(repo, f, b'', False)
        for standin in orphans:
            repo.wvfs.unlinkpath(standin, ignoremissing=True)

    return result


@eh.wrapcommand(b'transplant', extension=b'transplant')
def overridetransplant(orig, ui, repo, *revs, **opts):
    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        result = orig(ui, repo, *revs, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
    return result


@eh.wrapcommand(b'cat')
def overridecat(orig, ui, repo, file1, *pats, **opts):
    ctx = logcmdutil.revsingle(repo, opts.get('rev'))
    err = 1
    notbad = set()
    m = scmutil.match(ctx, (file1,) + pats, pycompat.byteskwargs(opts))
    m._was_tampered_with = True
    origmatchfn = m.matchfn

    def lfmatchfn(f):
        if origmatchfn(f):
            return True
        lf = lfutil.splitstandin(f)
        if lf is None:
            return False
        notbad.add(lf)
        return origmatchfn(lf)

    m.matchfn = lfmatchfn
    origbadfn = m.bad

    def lfbadfn(f, msg):
        if f not in notbad:
            origbadfn(f, msg)

    m.bad = lfbadfn

    origvisitdirfn = m.visitdir

    def lfvisitdirfn(dir):
        if dir == lfutil.shortname:
            return True
        ret = origvisitdirfn(dir)
        if ret:
            return ret
        lf = lfutil.splitstandin(dir)
        if lf is None:
            return False
        return origvisitdirfn(lf)

    m.visitdir = lfvisitdirfn

    for f in ctx.walk(m):
        with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
            lf = lfutil.splitstandin(f)
            if lf is None or origmatchfn(f):
                # duplicating unreachable code from commands.cat
                data = ctx[f].data()
                if opts.get('decode'):
                    data = repo.wwritedata(f, data)
                fp.write(data)
            else:
                hash = lfutil.readasstandin(ctx[f])
                if not lfutil.inusercache(repo.ui, hash):
                    store = storefactory.openstore(repo)
                    success, missing = store.get([(lf, hash)])
                    if len(success) != 1:
                        raise error.Abort(
                            _(
                                b'largefile %s is not in cache and could not be '
                                b'downloaded'
                            )
                            % lf
                        )
                path = lfutil.usercachepath(repo.ui, hash)
                with open(path, b"rb") as fpin:
                    for chunk in util.filechunkiter(fpin):
                        fp.write(chunk)
        err = 0
    return err


@eh.wrapfunction(merge, '_update')
def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
    matcher = kwargs.get('matcher', None)
    # note if this is a partial update
    partial = matcher and not matcher.always()
    with repo.wlock(), repo.dirstate.changing_parents(repo):
        # branch |       |         |
        #  merge | force | partial | action
        # -------+-------+---------+--------------
        #    x   |   x   |    x    | linear-merge
        #    o   |   x   |    x    | branch-merge
        #    x   |   o   |    x    | overwrite (as clean update)
        #    o   |   o   |    x    | force-branch-merge (*1)
        #    x   |   x   |    o    |   (*)
        #    o   |   x   |    o    |   (*)
        #    x   |   o   |    o    | overwrite (as revert)
        #    o   |   o   |    o    |   (*)
        #
        # (*) don't care
        # (*1) deprecated, but used internally (e.g: "rebase --collapse")
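        #
        # Reading one row as an example: a plain `hg update -C` is force
        # without branchmerge and without a matcher (not partial), i.e. the
        # "overwrite (as clean update)" row.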
        with repo.dirstate.running_status(repo):
            lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
            unsure, s, mtime_boundary = lfdirstate.status(
                matchmod.always(),
                subrepos=[],
                ignored=False,
                clean=True,
                unknown=False,
            )
            oldclean = set(s.clean)
            pctx = repo[b'.']
            dctx = repo[node]
            for lfile in unsure + s.modified:
                lfileabs = repo.wvfs.join(lfile)
                if not repo.wvfs.exists(lfileabs):
                    continue
                lfhash = lfutil.hashfile(lfileabs)
                standin = lfutil.standin(lfile)
                lfutil.writestandin(
                    repo, standin, lfhash, lfutil.getexecutable(lfileabs)
                )
                if standin in pctx and lfhash == lfutil.readasstandin(
                    pctx[standin]
                ):
                    oldclean.add(lfile)
            for lfile in s.added:
                fstandin = lfutil.standin(lfile)
                if fstandin not in dctx:
                    # in this case, content of standin file is meaningless
                    # (in dctx, lfile is unknown, or normal file)
                    continue
                lfutil.updatestandin(repo, lfile, fstandin)
            # mark all clean largefiles as dirty, just in case the update gets
            # interrupted before largefiles and lfdirstate are synchronized
            for lfile in oldclean:
                entry = lfdirstate.get_entry(lfile)
                lfdirstate.hacky_extension_update_file(
                    lfile,
                    wc_tracked=entry.tracked,
                    p1_tracked=entry.p1_tracked,
                    p2_info=entry.p2_info,
                    possibly_dirty=True,
                )
            lfdirstate.write(repo.currenttransaction())

        oldstandins = lfutil.getstandinsstate(repo)
        wc = kwargs.get('wc')
        if wc and wc.isinmemory():
            # largefiles is not a good candidate for in-memory merge (large
            # files, custom dirstate, matcher usage).
            raise error.ProgrammingError(
                b'largefiles is not compatible with in-memory merge'
            )
        result = orig(repo, node, branchmerge, force, *args, **kwargs)

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)

        # to avoid leaving all largefiles as dirty and thus rehash them, mark
        # all the ones that didn't change as clean
        for lfile in oldclean.difference(filelist):
            lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)

        if branchmerge or force or partial:
            filelist.extend(s.deleted + s.removed)

        lfcommands.updatelfiles(
            repo.ui, repo, filelist=filelist, normallookup=partial
        )

    return result
1896
1901
1897
1902
1898 @eh.wrapfunction(scmutil, 'marktouched')
1903 @eh.wrapfunction(scmutil, 'marktouched')
1899 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1904 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1900 result = orig(repo, files, *args, **kwargs)
1905 result = orig(repo, files, *args, **kwargs)
1901
1906
1902 filelist = []
1907 filelist = []
1903 for f in files:
1908 for f in files:
1904 lf = lfutil.splitstandin(f)
1909 lf = lfutil.splitstandin(f)
1905 if lf is not None:
1910 if lf is not None:
1906 filelist.append(lf)
1911 filelist.append(lf)
1907 if filelist:
1912 if filelist:
1908 lfcommands.updatelfiles(
1913 lfcommands.updatelfiles(
1909 repo.ui,
1914 repo.ui,
1910 repo,
1915 repo,
1911 filelist=filelist,
1916 filelist=filelist,
1912 printmessage=False,
1917 printmessage=False,
1913 normallookup=True,
1918 normallookup=True,
1914 )
1919 )
1915
1920
1916 return result
1921 return result
1917
1922
1918
1923
1919 @eh.wrapfunction(upgrade_actions, 'preservedrequirements')
1924 @eh.wrapfunction(upgrade_actions, 'preservedrequirements')
1920 @eh.wrapfunction(upgrade_actions, 'supporteddestrequirements')
1925 @eh.wrapfunction(upgrade_actions, 'supporteddestrequirements')
1921 def upgraderequirements(orig, repo):
1926 def upgraderequirements(orig, repo):
1922 reqs = orig(repo)
1927 reqs = orig(repo)
1923 if b'largefiles' in repo.requirements:
1928 if b'largefiles' in repo.requirements:
1924 reqs.add(b'largefiles')
1929 reqs.add(b'largefiles')
1925 return reqs
1930 return reqs
1926
1931
1927
1932
1928 _lfscheme = b'largefile://'
1933 _lfscheme = b'largefile://'
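# ('largefile://<id>' URLs are handled below by handing everything after
# the scheme to storefactory.getlfile as the largefile id.)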


@eh.wrapfunction(urlmod, 'open')
def openlargefile(orig, ui, url_, data=None, **kwargs):
    if url_.startswith(_lfscheme):
        if data:
            msg = b"cannot use data on a 'largefile://' url"
            raise error.ProgrammingError(msg)
        lfid = url_[len(_lfscheme) :]
        return storefactory.getlfile(ui, lfid)
    else:
        return orig(ui, url_, data=data, **kwargs)
@@ -1,1891 +1,1891
""" Multicast DNS Service Discovery for Python, v0.12
    Copyright (C) 2003, Paul Scott-Murphy

    This module provides a framework for the use of DNS Service Discovery
    using IP multicast. It has been tested against the JRendezvous
    implementation from <a href="http://strangeberry.com">StrangeBerry</a>,
    and against the mDNSResponder from Mac OS X 10.3.8.

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation; either
    version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, see
    <http://www.gnu.org/licenses/>.

"""

"""0.12 update - allow selection of binding interface
    typo fix - Thanks A. M. Kuchlingi
    removed all use of word 'Rendezvous' - this is an API change"""

"""0.11 update - correction to comments for addListener method
    support for new record types seen from OS X
     - IPv6 address
     - hostinfo
    ignore unknown DNS record types
    fixes to name decoding
    works alongside other processes using port 5353 (e.g. Mac OS X)
    tested against Mac OS X 10.3.2's mDNSResponder
    corrections to removal of list entries for service browser"""

"""0.10 update - Jonathon Paisley contributed these corrections:
    always multicast replies, even when query is unicast
    correct a pointer encoding problem
    can now write records in any order
    traceback shown on failure
    better TXT record parsing
    server is now separate from name
    can cancel a service browser

    modified some unit tests to accommodate these changes"""

"""0.09 update - remove all records on service unregistration
    fix DOS security problem with readName"""

"""0.08 update - changed licensing to LGPL"""

"""0.07 update - faster shutdown on engine
    pointer encoding of outgoing names
    ServiceBrowser now works
    new unit tests"""

"""0.06 update - small improvements with unit tests
    added defined exception types
    new style objects
    fixed hostname/interface problem
    fixed socket timeout problem
    fixed addServiceListener() typo bug
    using select() for socket reads
    tested on Debian unstable with Python 2.2.2"""

"""0.05 update - ensure case insensitivity on domain names
    support for unicast DNS queries"""

"""0.04 update - added some unit tests
    added __ne__ adjuncts where required
    ensure names end in '.local.'
    timeout on receiving socket for clean shutdown"""

__author__ = b"Paul Scott-Murphy"
__email__ = b"paul at scott dash murphy dot com"
__version__ = b"0.12"

import errno
import itertools
import select
import socket
import struct
import threading
import time
import traceback

from mercurial import pycompat

__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
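
# A hypothetical usage sketch (not part of the original module), assuming
# the classic zeroconf-0.12 listener protocol; the listener class and the
# '_http._tcp.local.' service type below are illustrative only:
#
#     class MyListener:
#         def addService(self, zeroconf, type, name):
#             print('service added: %r' % name)
#
#         def removeService(self, zeroconf, type, name):
#             print('service removed: %r' % name)
#
#     zc = Zeroconf()
#     ServiceBrowser(zc, '_http._tcp.local.', MyListener())
#     ...  # let it browse for a while
#     zc.close()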

# hook for threads

globals()[b'_GLOBAL_DONE'] = 0

# Some timing constants

_UNREGISTER_TIME = 125
_CHECK_TIME = 175
_REGISTER_TIME = 225
_LISTENER_TIME = 200
_BROWSER_TIME = 500

# Some DNS constants

_MDNS_ADDR = r'224.0.0.251'
_MDNS_PORT = 5353
_DNS_PORT = 53
_DNS_TTL = 60 * 60  # one hour default TTL

_MAX_MSG_TYPICAL = 1460  # unused
_MAX_MSG_ABSOLUTE = 8972
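# (8972 presumably corresponds to a 9000-byte Ethernet jumbo frame minus
# 20 bytes of IP header and 8 bytes of UDP header.)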

_FLAGS_QR_MASK = 0x8000  # query response mask
_FLAGS_QR_QUERY = 0x0000  # query
_FLAGS_QR_RESPONSE = 0x8000  # response

_FLAGS_AA = 0x0400  # Authoritative answer
_FLAGS_TC = 0x0200  # Truncated
_FLAGS_RD = 0x0100  # Recursion desired
_FLAGS_RA = 0x8000  # Recursion available
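# (RFC 1035 puts the recursion-available bit at 0x0080; 0x8000 above looks
# like an upstream slip, but the constant appears unused in this module.)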

_FLAGS_Z = 0x0040  # Zero
_FLAGS_AD = 0x0020  # Authentic data
_FLAGS_CD = 0x0010  # Checking disabled

_CLASS_IN = 1
_CLASS_CS = 2
_CLASS_CH = 3
_CLASS_HS = 4
_CLASS_NONE = 254
_CLASS_ANY = 255
_CLASS_MASK = 0x7FFF
_CLASS_UNIQUE = 0x8000
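# (In mDNS the top bit of the class field is not part of the class proper:
# it is the cache-flush bit in records and the unicast-response bit in
# questions, which is why it is masked off and tracked as 'unique'.)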

_TYPE_A = 1
_TYPE_NS = 2
_TYPE_MD = 3
_TYPE_MF = 4
_TYPE_CNAME = 5
_TYPE_SOA = 6
_TYPE_MB = 7
_TYPE_MG = 8
_TYPE_MR = 9
_TYPE_NULL = 10
_TYPE_WKS = 11
_TYPE_PTR = 12
_TYPE_HINFO = 13
_TYPE_MINFO = 14
_TYPE_MX = 15
_TYPE_TXT = 16
_TYPE_AAAA = 28
_TYPE_SRV = 33
_TYPE_ANY = 255

# Mapping constants to names

_CLASSES = {
    _CLASS_IN: b"in",
    _CLASS_CS: b"cs",
    _CLASS_CH: b"ch",
    _CLASS_HS: b"hs",
    _CLASS_NONE: b"none",
    _CLASS_ANY: b"any",
}

_TYPES = {
    _TYPE_A: b"a",
    _TYPE_NS: b"ns",
    _TYPE_MD: b"md",
    _TYPE_MF: b"mf",
    _TYPE_CNAME: b"cname",
    _TYPE_SOA: b"soa",
    _TYPE_MB: b"mb",
    _TYPE_MG: b"mg",
    _TYPE_MR: b"mr",
    _TYPE_NULL: b"null",
    _TYPE_WKS: b"wks",
    _TYPE_PTR: b"ptr",
    _TYPE_HINFO: b"hinfo",
    _TYPE_MINFO: b"minfo",
    _TYPE_MX: b"mx",
    _TYPE_TXT: b"txt",
    _TYPE_AAAA: b"quada",
    _TYPE_SRV: b"srv",
    _TYPE_ANY: b"any",
}

# utility functions


def currentTimeMillis():
    """Current system time in milliseconds"""
    return time.time() * 1000


# Exceptions


class NonLocalNameException(Exception):
    pass


class NonUniqueNameException(Exception):
    pass


class NamePartTooLongException(Exception):
    pass


class AbstractMethodException(Exception):
    pass


class BadTypeInNameException(Exception):
    pass


class BadDomainName(Exception):
    def __init__(self, pos):
        Exception.__init__(self, b"at position %s" % pos)


class BadDomainNameCircular(BadDomainName):
    pass


# implementation classes


class DNSEntry:
    """A DNS entry"""

    def __init__(self, name, type, clazz):
        self.key = name.lower()
        self.name = name
        self.type = type
        self.clazz = clazz & _CLASS_MASK
        self.unique = (clazz & _CLASS_UNIQUE) != 0

    def __eq__(self, other):
        """Equality test on name, type, and class"""
        if isinstance(other, DNSEntry):
            return (
                self.name == other.name
                and self.type == other.type
                and self.clazz == other.clazz
            )
        return 0

    def __ne__(self, other):
        """Non-equality test"""
        return not self.__eq__(other)

    def getClazz(self, clazz):
        """Class accessor"""
        try:
            return _CLASSES[clazz]
        except KeyError:
            return b"?(%s)" % clazz

    def getType(self, type):
        """Type accessor"""
        try:
            return _TYPES[type]
        except KeyError:
            return b"?(%s)" % type

    def toString(self, hdr, other):
        """String representation with additional information"""
        result = b"%s[%s,%s" % (
            hdr,
            self.getType(self.type),
            self.getClazz(self.clazz),
        )
        if self.unique:
            result += b"-unique,"
        else:
            result += b","
        result += self.name
        if other is not None:
            result += b",%s]" % other
        else:
            result += b"]"
        return result


class DNSQuestion(DNSEntry):
    """A DNS question entry"""

    def __init__(self, name, type, clazz):
        if isinstance(name, str):
            name = name.encode('ascii')
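        # This implementation only speaks mDNS, so only names in the
        # .local. domain are accepted below.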
        if not name.endswith(b".local."):
            raise NonLocalNameException(name)
        DNSEntry.__init__(self, name, type, clazz)

    def answeredBy(self, rec):
        """Returns true if the question is answered by the record"""
        return (
            self.clazz == rec.clazz
            and (self.type == rec.type or self.type == _TYPE_ANY)
            and self.name == rec.name
        )

    def __repr__(self):
        """String representation"""
        return DNSEntry.toString(self, b"question", None)


class DNSRecord(DNSEntry):
    """A DNS record - like a DNS entry, but has a TTL"""

    def __init__(self, name, type, clazz, ttl):
        DNSEntry.__init__(self, name, type, clazz)
        self.ttl = ttl
        self.created = currentTimeMillis()

    def __eq__(self, other):
        """Tests equality as per DNSRecord"""
        if isinstance(other, DNSRecord):
            return DNSEntry.__eq__(self, other)
        return 0

    def suppressedBy(self, msg):
        """Returns true if any answer in a message can suffice for the
        information held in this record."""
        for record in msg.answers:
            if self.suppressedByAnswer(record):
                return 1
        return 0

    def suppressedByAnswer(self, other):
        """Returns true if another record has the same name, type and class,
        and if its TTL is at least half of this record's."""
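        # This is the mDNS duplicate-answer suppression rule (cf. RFC 6762,
        # section 7.1): a known answer with at least half the TTL remaining
        # makes resending this record redundant.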
        if self == other and other.ttl > (self.ttl / 2):
            return 1
        return 0

    def getExpirationTime(self, percent):
        """Returns the time at which this record will have expired
        by a certain percentage."""
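        # 'created' is in milliseconds and 'ttl' in seconds, so
        # percent * ttl * 10 == ttl * 1000 * (percent / 100) milliseconds.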
        return self.created + (percent * self.ttl * 10)

    def getRemainingTTL(self, now):
        """Returns the remaining TTL in seconds."""
        return max(0, (self.getExpirationTime(100) - now) / 1000)

    def isExpired(self, now):
        """Returns true if this record has expired."""
        return self.getExpirationTime(100) <= now

    def isStale(self, now):
        """Returns true if this record is at least halfway expired."""
        return self.getExpirationTime(50) <= now

    def resetTTL(self, other):
        """Sets this record's TTL and created time to that of
        another record."""
        self.created = other.created
        self.ttl = other.ttl

    def write(self, out):
        """Abstract method"""
        raise AbstractMethodException

    def toString(self, other):
        """String representation with additional information"""
        arg = b"%s/%s,%s" % (
            self.ttl,
            self.getRemainingTTL(currentTimeMillis()),
            other,
        )
        return DNSEntry.toString(self, b"record", arg)


class DNSAddress(DNSRecord):
    """A DNS address record"""

    def __init__(self, name, type, clazz, ttl, address):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.address = address

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeString(self.address, len(self.address))

    def __eq__(self, other):
        """Tests equality on address"""
        if isinstance(other, DNSAddress):
            return self.address == other.address
        return 0

    def __repr__(self):
        """String representation"""
        try:
            return socket.inet_ntoa(self.address)
        except Exception:
            return self.address


class DNSHinfo(DNSRecord):
    """A DNS host information record"""

    def __init__(self, name, type, clazz, ttl, cpu, os):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.cpu = cpu
        self.os = os

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeString(self.cpu, len(self.cpu))
        out.writeString(self.os, len(self.os))

    def __eq__(self, other):
        """Tests equality on cpu and os"""
        if isinstance(other, DNSHinfo):
            return self.cpu == other.cpu and self.os == other.os
        return 0

    def __repr__(self):
        """String representation"""
        return self.cpu + b" " + self.os


class DNSPointer(DNSRecord):
    """A DNS pointer record"""

    def __init__(self, name, type, clazz, ttl, alias):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.alias = alias

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeName(self.alias)

    def __eq__(self, other):
        """Tests equality on alias"""
        if isinstance(other, DNSPointer):
            return self.alias == other.alias
        return 0

    def __repr__(self):
        """String representation"""
        return self.toString(self.alias)


class DNSText(DNSRecord):
    """A DNS text record"""

    def __init__(self, name, type, clazz, ttl, text):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.text = text

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeString(self.text, len(self.text))

    def __eq__(self, other):
        """Tests equality on text"""
        if isinstance(other, DNSText):
            return self.text == other.text
        return 0

    def __repr__(self):
        """String representation"""
        if len(self.text) > 10:
            return self.toString(self.text[:7] + b"...")
        else:
            return self.toString(self.text)


class DNSService(DNSRecord):
    """A DNS service record"""

    def __init__(self, name, type, clazz, ttl, priority, weight, port, server):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.server = server

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeShort(self.priority)
        out.writeShort(self.weight)
        out.writeShort(self.port)
        out.writeName(self.server)

    def __eq__(self, other):
        """Tests equality on priority, weight, port and server"""
        if isinstance(other, DNSService):
            return (
                self.priority == other.priority
                and self.weight == other.weight
                and self.port == other.port
                and self.server == other.server
            )
        return 0

    def __repr__(self):
        """String representation"""
        return self.toString(b"%s:%s" % (self.server, self.port))


class DNSIncoming:
    """Object representation of an incoming DNS packet"""

    def __init__(self, data):
        """Constructor from string holding bytes of packet"""
        self.offset = 0
        self.data = data
        self.questions = []
        self.answers = []
        self.numquestions = 0
        self.numanswers = 0
        self.numauthorities = 0
        self.numadditionals = 0

        self.readHeader()
        self.readQuestions()
        self.readOthers()

    def readHeader(self):
        """Reads header portion of packet"""
        format = b'!HHHHHH'
        length = struct.calcsize(format)
        info = struct.unpack(
            format, self.data[self.offset : self.offset + length]
        )
        self.offset += length

        self.id = info[0]
        self.flags = info[1]
        self.numquestions = info[2]
        self.numanswers = info[3]
        self.numauthorities = info[4]
        self.numadditionals = info[5]

    def readQuestions(self):
        """Reads questions section of packet"""
        format = b'!HH'
        length = struct.calcsize(format)
        for i in range(0, self.numquestions):
            name = self.readName()
            info = struct.unpack(
                format, self.data[self.offset : self.offset + length]
            )
            self.offset += length

            try:
                question = DNSQuestion(name, info[0], info[1])
                self.questions.append(question)
            except NonLocalNameException:
                pass

    def readInt(self):
        """Reads an integer from the packet"""
        format = b'!I'
        length = struct.calcsize(format)
        info = struct.unpack(
            format, self.data[self.offset : self.offset + length]
        )
        self.offset += length
        return info[0]

    def readCharacterString(self):
        """Reads a character string from the packet"""
        length = ord(self.data[self.offset])
        self.offset += 1
        return self.readString(length)

    def readString(self, len):
        """Reads a string of a given length from the packet"""
        format = b'!%ds' % len
        length = struct.calcsize(format)
        info = struct.unpack(
            format, self.data[self.offset : self.offset + length]
        )
        self.offset += length
        return info[0]

    def readUnsignedShort(self):
        """Reads an unsigned short from the packet"""
        format = b'!H'
        length = struct.calcsize(format)
        info = struct.unpack(
            format, self.data[self.offset : self.offset + length]
        )
        self.offset += length
        return info[0]

    def readOthers(self):
        """Reads answers, authorities and additionals section of the packet"""
        format = b'!HHiH'
        length = struct.calcsize(format)
        n = self.numanswers + self.numauthorities + self.numadditionals
        for i in range(0, n):
            domain = self.readName()
            info = struct.unpack(
                format, self.data[self.offset : self.offset + length]
            )
            self.offset += length

            rec = None
            if info[0] == _TYPE_A:
                rec = DNSAddress(
                    domain, info[0], info[1], info[2], self.readString(4)
                )
            elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
                rec = DNSPointer(
                    domain, info[0], info[1], info[2], self.readName()
                )
            elif info[0] == _TYPE_TXT:
                rec = DNSText(
                    domain, info[0], info[1], info[2], self.readString(info[3])
                )
            elif info[0] == _TYPE_SRV:
                rec = DNSService(
                    domain,
                    info[0],
                    info[1],
                    info[2],
                    self.readUnsignedShort(),
                    self.readUnsignedShort(),
                    self.readUnsignedShort(),
                    self.readName(),
                )
            elif info[0] == _TYPE_HINFO:
                rec = DNSHinfo(
                    domain,
                    info[0],
                    info[1],
                    info[2],
                    self.readCharacterString(),
                    self.readCharacterString(),
                )
            elif info[0] == _TYPE_AAAA:
                rec = DNSAddress(
                    domain, info[0], info[1], info[2], self.readString(16)
                )
            else:
                # Try to ignore types we don't know about
                # this may mean the rest of the name is
                # unable to be parsed, and may show errors
                # so this is left for debugging. New types
                # encountered need to be parsed properly.
                #
                # print "UNKNOWN TYPE = " + str(info[0])
                # raise BadTypeInNameException
                self.offset += info[3]

            if rec is not None:
                self.answers.append(rec)

    def isQuery(self):
        """Returns true if this is a query"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY

    def isResponse(self):
        """Returns true if this is a response"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE

    def readUTF(self, offset, len):
        """Reads a UTF-8 string of a given length from the packet"""
        return self.data[offset : offset + len].decode('utf-8')

    def readName(self):
        """Reads a domain name from the packet"""
        result = r''
        off = self.offset
        next = -1
        first = off

        while True:
            len = ord(self.data[off : off + 1])
            off += 1
            if len == 0:
                break
            t = len & 0xC0
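            # The top two bits of the length octet select the label type:
            # 0x00 means a literal label follows, 0xC0 a compression
            # pointer (RFC 1035, section 4.1.4).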
            if t == 0x00:
                result = ''.join((result, self.readUTF(off, len) + '.'))
                off += len
            elif t == 0xC0:
                if next < 0:
                    next = off + 1
                off = ((len & 0x3F) << 8) | ord(self.data[off : off + 1])
                if off >= first:
                    raise BadDomainNameCircular(off)
                first = off
            else:
                raise BadDomainName(off)

        if next >= 0:
            self.offset = next
        else:
            self.offset = off

        return result


class DNSOutgoing:
    """Object representation of an outgoing packet"""

    def __init__(self, flags, multicast=1):
        self.finished = 0
        self.id = 0
        self.multicast = multicast
        self.flags = flags
        self.names = {}
        self.data = []
        self.size = 12
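        # 12 bytes is the fixed DNS header, which packet() prepends last.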

        self.questions = []
        self.answers = []
        self.authorities = []
        self.additionals = []

    def addQuestion(self, record):
        """Adds a question"""
        self.questions.append(record)

    def addAnswer(self, inp, record):
        """Adds an answer"""
        if not record.suppressedBy(inp):
            self.addAnswerAtTime(record, 0)

    def addAnswerAtTime(self, record, now):
        """Adds an answer if it does not expire by a certain time"""
        if record is not None:
            if now == 0 or not record.isExpired(now):
                self.answers.append((record, now))

    def addAuthoritativeAnswer(self, record):
        """Adds an authoritative answer"""
        self.authorities.append(record)

    def addAdditionalAnswer(self, record):
        """Adds an additional answer"""
        self.additionals.append(record)

    def writeByte(self, value):
        """Writes a single byte to the packet"""
        format = b'!c'
        self.data.append(struct.pack(format, chr(value)))
        self.size += 1

    def insertShort(self, index, value):
        """Inserts an unsigned short in a certain position in the packet"""
        format = b'!H'
        self.data.insert(index, struct.pack(format, value))
        self.size += 2

    def writeShort(self, value):
        """Writes an unsigned short to the packet"""
        format = b'!H'
        self.data.append(struct.pack(format, value))
        self.size += 2

    def writeInt(self, value):
        """Writes an unsigned integer to the packet"""
        format = b'!I'
        self.data.append(struct.pack(format, int(value)))
        self.size += 4

    def writeString(self, value, length):
        """Writes a string to the packet"""
        format = '!' + str(length) + 's'
        self.data.append(struct.pack(format, value))
        self.size += length

    def writeUTF(self, s):
        """Writes a UTF-8 string of a given length to the packet"""
        utfstr = s.encode('utf-8')
        length = len(utfstr)
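        # DNS labels may be at most 63 octets (RFC 1035), so the bound
        # below looks off by one; it is kept as upstream wrote it.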
        if length > 64:
            raise NamePartTooLongException
        self.writeByte(length)
        self.writeString(utfstr, length)

    def writeName(self, name):
        """Writes a domain name to the packet"""

        try:
            # Find existing instance of this name in packet
            #
            index = self.names[name]
        except KeyError:
            # No record of this name already, so write it
            # out as normal, recording the location of the name
            # for future pointers to it.
            #
            self.names[name] = self.size
            parts = name.split(b'.')
            if parts[-1] == b'':
                parts = parts[:-1]
            for part in parts:
                self.writeUTF(part)
            self.writeByte(0)
            return

        # An index was found, so write a pointer to it
        #
        self.writeByte((index >> 8) | 0xC0)
        self.writeByte(index)

    def writeQuestion(self, question):
        """Writes a question to the packet"""
        self.writeName(question.name)
        self.writeShort(question.type)
        self.writeShort(question.clazz)

    def writeRecord(self, record, now):
        """Writes a record (answer, authoritative answer, additional) to
        the packet"""
        self.writeName(record.name)
        self.writeShort(record.type)
        if record.unique and self.multicast:
            self.writeShort(record.clazz | _CLASS_UNIQUE)
        else:
            self.writeShort(record.clazz)
        if now == 0:
            self.writeInt(record.ttl)
        else:
            self.writeInt(record.getRemainingTTL(now))
        index = len(self.data)
        # Adjust size for the short we will write before this record
        #
        self.size += 2
        record.write(self)
        self.size -= 2

        length = len(b''.join(self.data[index:]))
        self.insertShort(index, length)  # Here is the short we adjusted for

    def packet(self):
        """Returns a string containing the packet's bytes

        No further parts should be added to the packet once this
        is done."""
        if not self.finished:
            self.finished = 1
            for question in self.questions:
                self.writeQuestion(question)
            for answer, time_ in self.answers:
                self.writeRecord(answer, time_)
            for authority in self.authorities:
                self.writeRecord(authority, 0)
            for additional in self.additionals:
                self.writeRecord(additional, 0)

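            # The header is prepended in reverse field order (counts,
            # then flags, then id), so the finished packet reads: id,
            # flags, question/answer/authority/additional counts, body.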
            self.insertShort(0, len(self.additionals))
            self.insertShort(0, len(self.authorities))
            self.insertShort(0, len(self.answers))
            self.insertShort(0, len(self.questions))
            self.insertShort(0, self.flags)
            if self.multicast:
                self.insertShort(0, 0)
            else:
                self.insertShort(0, self.id)
        return b''.join(self.data)


class DNSCache:
    """A cache of DNS entries"""

    def __init__(self):
        self.cache = {}

    def add(self, entry):
        """Adds an entry"""
        try:
            list = self.cache[entry.key]
        except KeyError:
            list = self.cache[entry.key] = []
        list.append(entry)

    def remove(self, entry):
        """Removes an entry"""
        try:
            list = self.cache[entry.key]
            list.remove(entry)
        except KeyError:
            pass

    def get(self, entry):
        """Gets an entry by key. Will return None if there is no
        matching entry."""
        try:
            list = self.cache[entry.key]
            return list[list.index(entry)]
        except (KeyError, ValueError):
            return None

    def getByDetails(self, name, type, clazz):
        """Gets an entry by details. Will return None if there is
        no matching entry."""
        entry = DNSEntry(name, type, clazz)
        return self.get(entry)

    def entriesWithName(self, name):
        """Returns a list of entries whose key matches the name."""
        try:
            return self.cache[name]
        except KeyError:
            return []

    def entries(self):
        """Returns a list of all entries"""
        try:
            return list(itertools.chain.from_iterable(self.cache.values()))
        except Exception:
            return []


class Engine(threading.Thread):
    """An engine wraps read access to sockets, allowing objects that
    need to receive data from sockets to be called back when the
    sockets are ready.

    A reader needs a handle_read() method, which is called when the socket
    it is interested in is ready for reading.

    Writers are not implemented here, because we only send short
    packets.
    """

    def __init__(self, zeroconf):
        threading.Thread.__init__(self)
        self.zeroconf = zeroconf
        self.readers = {}  # maps socket to reader
        self.timeout = 5
        self.condition = threading.Condition()
        self.start()

    def run(self):
        while not globals()[b'_GLOBAL_DONE']:
            rs = self.getReaders()
            if len(rs) == 0:
                # No sockets to manage, but we wait for the timeout
                # or addition of a socket
                #
                self.condition.acquire()
                self.condition.wait(self.timeout)
                self.condition.release()
            else:
                try:
                    rr, wr, er = select.select(rs, [], [], self.timeout)
                    for sock in rr:
                        try:
                            self.readers[sock].handle_read()
                        except Exception:
                            if not globals()[b'_GLOBAL_DONE']:
                                traceback.print_exc()
                except Exception:
                    pass

    def getReaders(self):
        self.condition.acquire()
        result = self.readers.keys()
        self.condition.release()
        return result

    def addReader(self, reader, socket):
        self.condition.acquire()
        self.readers[socket] = reader
        self.condition.notify()
        self.condition.release()

    def delReader(self, socket):
        self.condition.acquire()
        del self.readers[socket]
        self.condition.notify()
        self.condition.release()

    def notify(self):
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
983
983
984
984
985 class Listener:
985 class Listener:
986 """A Listener is used by this module to listen on the multicast
986 """A Listener is used by this module to listen on the multicast
987 group to which DNS messages are sent, allowing the implementation
987 group to which DNS messages are sent, allowing the implementation
988 to cache information as it arrives.
988 to cache information as it arrives.
989
989
990 It requires registration with an Engine object in order to have
990 It requires registration with an Engine object in order to have
991 the read() method called when a socket is available for reading."""
991 the read() method called when a socket is available for reading."""
992
992
993 def __init__(self, zeroconf):
993 def __init__(self, zeroconf):
994 self.zeroconf = zeroconf
994 self.zeroconf = zeroconf
995 self.zeroconf.engine.addReader(self, self.zeroconf.socket)
995 self.zeroconf.engine.addReader(self, self.zeroconf.socket)
996
996
997 def handle_read(self):
997 def handle_read(self):
998 sock = self.zeroconf.socket
998 sock = self.zeroconf.socket
999 try:
999 try:
1000 data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE)
1000 data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE)
1001 except socket.error as e:
1001 except socket.error as e:
1002 if e.errno == errno.EBADF:
1002 if e.errno == errno.EBADF:
1003 # another thread may have closed the socket
1003 # another thread may have closed the socket
1004 return
1004 return
1005 else:
1005 else:
1006 raise
1006 raise
1007 self.data = data
1007 self.data = data
1008 msg = DNSIncoming(data)
1008 msg = DNSIncoming(data)
1009 if msg.isQuery():
1009 if msg.isQuery():
1010 # Always multicast responses
1010 # Always multicast responses
1011 #
1011 #
1012 if port == _MDNS_PORT:
1012 if port == _MDNS_PORT:
1013 self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
1013 self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
1014 # If it's not a multicast query, reply via unicast
1014 # If it's not a multicast query, reply via unicast
1015 # and multicast
1015 # and multicast
1016 #
1016 #
1017 elif port == _DNS_PORT:
1017 elif port == _DNS_PORT:
1018 self.zeroconf.handleQuery(msg, addr, port)
1018 self.zeroconf.handleQuery(msg, addr, port)
1019 self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
1019 self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
1020 else:
1020 else:
1021 self.zeroconf.handleResponse(msg)
1021 self.zeroconf.handleResponse(msg)
1022
1022
1023
1023
1024 class Reaper(threading.Thread):
1024 class Reaper(threading.Thread):
1025 """A Reaper is used by this module to remove cache entries that
1025 """A Reaper is used by this module to remove cache entries that
1026 have expired."""
1026 have expired."""
1027
1027
1028 def __init__(self, zeroconf):
1028 def __init__(self, zeroconf):
1029 threading.Thread.__init__(self)
1029 threading.Thread.__init__(self)
1030 self.zeroconf = zeroconf
1030 self.zeroconf = zeroconf
1031 self.start()
1031 self.start()
1032
1032
1033 def run(self):
1033 def run(self):
1034 while True:
1034 while True:
1035 self.zeroconf.wait(10 * 1000)
1035 self.zeroconf.wait(10 * 1000)
1036 if globals()[b'_GLOBAL_DONE']:
1036 if globals()[b'_GLOBAL_DONE']:
1037 return
1037 return
1038 now = currentTimeMillis()
1038 now = currentTimeMillis()
1039 for record in self.zeroconf.cache.entries():
1039 for record in self.zeroconf.cache.entries():
1040 if record.isExpired(now):
1040 if record.isExpired(now):
1041 self.zeroconf.updateRecord(now, record)
1041 self.zeroconf.updateRecord(now, record)
1042 self.zeroconf.cache.remove(record)
1042 self.zeroconf.cache.remove(record)
1043
1043
1044
1044
1045 class ServiceBrowser(threading.Thread):
1045 class ServiceBrowser(threading.Thread):
1046 """Used to browse for a service of a specific type.
1046 """Used to browse for a service of a specific type.
1047
1047
1048 The listener object will have its addService() and
1048 The listener object will have its addService() and
1049 removeService() methods called when this browser
1049 removeService() methods called when this browser
1050 discovers changes in the services' availability."""
1050 discovers changes in the services' availability."""
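# Illustrative sketch (added for clarity; the listener class and ``zc`` are
# made up): a listener only needs addService() and removeService(); resolving
# full details is typically delegated to getServiceInfo().
#
#     class PrintingListener:
#         def addService(self, zeroconf, type, name):
#             info = zeroconf.getServiceInfo(type, name)
#             print(b"added:", name, str(info))
#
#         def removeService(self, zeroconf, type, name):
#             print(b"removed:", name)
#
#     browser = ServiceBrowser(zc, b"_http._tcp.local.", PrintingListener())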
1051
1051
1052 def __init__(self, zeroconf, type, listener):
1052 def __init__(self, zeroconf, type, listener):
1053 """Creates a browser for a specific type"""
1053 """Creates a browser for a specific type"""
1054 threading.Thread.__init__(self)
1054 threading.Thread.__init__(self)
1055 self.zeroconf = zeroconf
1055 self.zeroconf = zeroconf
1056 self.type = type
1056 self.type = type
1057 self.listener = listener
1057 self.listener = listener
1058 self.services = {}
1058 self.services = {}
1059 self.nexttime = currentTimeMillis()
1059 self.nexttime = currentTimeMillis()
1060 self.delay = _BROWSER_TIME
1060 self.delay = _BROWSER_TIME
1061 self.list = []
1061 self.list = []
1062
1062
1063 self.done = 0
1063 self.done = 0
1064
1064
1065 self.zeroconf.addListener(
1065 self.zeroconf.addListener(
1066 self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)
1066 self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)
1067 )
1067 )
1068 self.start()
1068 self.start()
1069
1069
1070 def updateRecord(self, zeroconf, now, record):
1070 def updateRecord(self, zeroconf, now, record):
1071 """Callback invoked by Zeroconf when new information arrives.
1071 """Callback invoked by Zeroconf when new information arrives.
1072
1072
1073 Updates information required by the browser in the Zeroconf cache."""
1073 Updates information required by the browser in the Zeroconf cache."""
1074 if record.type == _TYPE_PTR and record.name == self.type:
1074 if record.type == _TYPE_PTR and record.name == self.type:
1075 expired = record.isExpired(now)
1075 expired = record.isExpired(now)
1076 try:
1076 try:
1077 oldrecord = self.services[record.alias.lower()]
1077 oldrecord = self.services[record.alias.lower()]
1078 if not expired:
1078 if not expired:
1079 oldrecord.resetTTL(record)
1079 oldrecord.resetTTL(record)
1080 else:
1080 else:
1081 del self.services[record.alias.lower()]
1081 del self.services[record.alias.lower()]
1082 callback = lambda x: self.listener.removeService(
1082 callback = lambda x: self.listener.removeService(
1083 x, self.type, record.alias
1083 x, self.type, record.alias
1084 )
1084 )
1085 self.list.append(callback)
1085 self.list.append(callback)
1086 return
1086 return
1087 except Exception:
1087 except Exception:
1088 if not expired:
1088 if not expired:
1089 self.services[record.alias.lower()] = record
1089 self.services[record.alias.lower()] = record
1090 callback = lambda x: self.listener.addService(
1090 callback = lambda x: self.listener.addService(
1091 x, self.type, record.alias
1091 x, self.type, record.alias
1092 )
1092 )
1093 self.list.append(callback)
1093 self.list.append(callback)
1094
1094
1095 expires = record.getExpirationTime(75)
1095 expires = record.getExpirationTime(75)
1096 if expires < self.nexttime:
1096 if expires < self.nexttime:
1097 self.nexttime = expires
1097 self.nexttime = expires
1098
1098
1099 def cancel(self):
1099 def cancel(self):
1100 self.done = 1
1100 self.done = 1
1101 self.zeroconf.notifyAll()
1101 self.zeroconf.notifyAll()
1102
1102
1103 def run(self):
1103 def run(self):
1104 while True:
1104 while True:
1105 event = None
1105 event = None
1106 now = currentTimeMillis()
1106 now = currentTimeMillis()
1107 if len(self.list) == 0 and self.nexttime > now:
1107 if len(self.list) == 0 and self.nexttime > now:
1108 self.zeroconf.wait(self.nexttime - now)
1108 self.zeroconf.wait(self.nexttime - now)
1109 if globals()[b'_GLOBAL_DONE'] or self.done:
1109 if globals()[b'_GLOBAL_DONE'] or self.done:
1110 return
1110 return
1111 now = currentTimeMillis()
1111 now = currentTimeMillis()
1112
1112
1113 if self.nexttime <= now:
1113 if self.nexttime <= now:
1114 out = DNSOutgoing(_FLAGS_QR_QUERY)
1114 out = DNSOutgoing(_FLAGS_QR_QUERY)
1115 out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
1115 out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
1116 for record in self.services.values():
1116 for record in self.services.values():
1117 if not record.isExpired(now):
1117 if not record.isExpired(now):
1118 out.addAnswerAtTime(record, now)
1118 out.addAnswerAtTime(record, now)
1119 self.zeroconf.send(out)
1119 self.zeroconf.send(out)
1120 self.nexttime = now + self.delay
1120 self.nexttime = now + self.delay
1121 self.delay = min(20 * 1000, self.delay * 2)
1121 self.delay = min(20 * 1000, self.delay * 2)
1122
1122
1123 if len(self.list) > 0:
1123 if len(self.list) > 0:
1124 event = self.list.pop(0)
1124 event = self.list.pop(0)
1125
1125
1126 if event is not None:
1126 if event is not None:
1127 event(self.zeroconf)
1127 event(self.zeroconf)
1128
1128
1129
1129
1130 class ServiceInfo:
1130 class ServiceInfo:
1131 """Service information"""
1131 """Service information"""
1132
1132
1133 def __init__(
1133 def __init__(
1134 self,
1134 self,
1135 type,
1135 type,
1136 name,
1136 name,
1137 address=None,
1137 address=None,
1138 port=None,
1138 port=None,
1139 weight=0,
1139 weight=0,
1140 priority=0,
1140 priority=0,
1141 properties=None,
1141 properties=None,
1142 server=None,
1142 server=None,
1143 ):
1143 ):
1144 """Create a service description.
1144 """Create a service description.
1145
1145
1146 type: fully qualified service type name
1146 type: fully qualified service type name
1147 name: fully qualified service name
1147 name: fully qualified service name
1148 address: packed IPv4 address (as returned by socket.inet_aton), network byte order
1148 address: packed IPv4 address (as returned by socket.inet_aton), network byte order
1149 port: port that the service runs on
1149 port: port that the service runs on
1150 weight: weight of the service
1150 weight: weight of the service
1151 priority: priority of the service
1151 priority: priority of the service
1152 properties: dictionary of properties (or a string holding the bytes for
1152 properties: dictionary of properties (or a string holding the bytes for
1153 the text field)
1153 the text field)
1154 server: fully qualified name for service host (defaults to name)"""
1154 server: fully qualified name for service host (defaults to name)"""
1155
1155
1156 if not name.endswith(type):
1156 if not name.endswith(type):
1157 raise BadTypeInNameException
1157 raise BadTypeInNameException
1158 self.type = type
1158 self.type = type
1159 self.name = name
1159 self.name = name
1160 self.address = address
1160 self.address = address
1161 self.port = port
1161 self.port = port
1162 self.weight = weight
1162 self.weight = weight
1163 self.priority = priority
1163 self.priority = priority
1164 if server:
1164 if server:
1165 self.server = server
1165 self.server = server
1166 else:
1166 else:
1167 self.server = name
1167 self.server = name
1168 self.setProperties(properties)
1168 self.setProperties(properties)
1169
1169
1170 def setProperties(self, properties):
1170 def setProperties(self, properties):
1171 """Sets properties and text of this info from a dictionary"""
1171 """Sets properties and text of this info from a dictionary"""
1172 if isinstance(properties, dict):
1172 if isinstance(properties, dict):
1173 self.properties = properties
1173 self.properties = properties
1174 items = []
1174 items = []
1175 result = b''
1175 result = b''
1176 for key in properties:
1176 for key in properties:
1177 value = properties[key]
1177 value = properties[key]
1178 if value is None:
1178 if value is None:
1179 suffix = b''
1179 suffix = b''
1180 elif isinstance(value, bytes):
1180 elif isinstance(value, bytes):
1181 suffix = value
1181 suffix = value
1182 elif isinstance(value, int):
1182 elif isinstance(value, int):
1183 if value:
1183 if value:
1184 suffix = b'true'
1184 suffix = b'true'
1185 else:
1185 else:
1186 suffix = b'false'
1186 suffix = b'false'
1187 else:
1187 else:
1188 suffix = b''
1188 suffix = b''
1189 items.append(b'='.join((key, suffix)))
1189 items.append(b'='.join((key, suffix)))
1190 for item in items:
1190 for item in items:
1191 result = b''.join(
1191 result = b''.join(
1192 (
1192 (
1193 result,
1193 result,
1194 struct.pack(b'!c', pycompat.bytechr(len(item))),
1194 struct.pack(b'!c', pycompat.bytechr(len(item))),
1195 item,
1195 item,
1196 )
1196 )
1197 )
1197 )
1198 self.text = result
1198 self.text = result
1199 else:
1199 else:
1200 self.text = properties
1200 self.text = properties
1201
1201
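# Worked example (added for clarity): setProperties() emits the DNS-SD TXT
# wire format, a sequence of length-prefixed key=value strings:
#
#     info.setProperties({b'version': b'0.10'})
#     assert info.text == b'\x0cversion=0.10'  # 0x0c == len(b'version=0.10')
#
# setText() below performs the inverse decoding.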
1202 def setText(self, text):
1202 def setText(self, text):
1203 """Sets properties and text given a text field"""
1203 """Sets properties and text given a text field"""
1204 self.text = text
1204 self.text = text
1205 try:
1205 try:
1206 result = {}
1206 result = {}
1207 end = len(text)
1207 end = len(text)
1208 index = 0
1208 index = 0
1209 strs = []
1209 strs = []
1210 while index < end:
1210 while index < end:
1211 length = text[index]
1211 length = text[index]
1212 index += 1
1212 index += 1
1213 strs.append(text[index : index + length])
1213 strs.append(text[index : index + length])
1214 index += length
1214 index += length
1215
1215
1216 for s in strs:
1216 for s in strs:
1217 eindex = s.find(b'=')
1217 eindex = s.find(b'=')
1218 if eindex == -1:
1218 if eindex == -1:
1219 # No equals sign at all
1219 # No equals sign at all
1220 key = s
1220 key = s
1221 value = 0
1221 value = 0
1222 else:
1222 else:
1223 key = s[:eindex]
1223 key = s[:eindex]
1224 value = s[eindex + 1 :]
1224 value = s[eindex + 1 :]
1225 if value == b'true':
1225 if value == b'true':
1226 value = 1
1226 value = 1
1227 elif value == b'false' or not value:
1227 elif value == b'false' or not value:
1228 value = 0
1228 value = 0
1229
1229
1230 # Only update non-existent properties
1230 # Only update non-existent properties
1231 if key and result.get(key) is None:
1231 if key and result.get(key) is None:
1232 result[key] = value
1232 result[key] = value
1233
1233
1234 self.properties = result
1234 self.properties = result
1235 except Exception:
1235 except Exception:
1236 traceback.print_exc()
1236 traceback.print_exc()
1237 self.properties = None
1237 self.properties = None
1238
1238
1239 def getType(self):
1239 def getType(self):
1240 """Type accessor"""
1240 """Type accessor"""
1241 return self.type
1241 return self.type
1242
1242
1243 def getName(self):
1243 def getName(self):
1244 """Name accessor"""
1244 """Name accessor"""
1245 if self.type is not None and self.name.endswith(b"." + self.type):
1245 if self.type is not None and self.name.endswith(b"." + self.type):
1246 return self.name[: len(self.name) - len(self.type) - 1]
1246 return self.name[: len(self.name) - len(self.type) - 1]
1247 return self.name
1247 return self.name
1248
1248
1249 def getAddress(self):
1249 def getAddress(self):
1250 """Address accessor"""
1250 """Address accessor"""
1251 return self.address
1251 return self.address
1252
1252
1253 def getPort(self):
1253 def getPort(self):
1254 """Port accessor"""
1254 """Port accessor"""
1255 return self.port
1255 return self.port
1256
1256
1257 def getPriority(self):
1257 def getPriority(self):
1258 """Priority accessor"""
1258 """Priority accessor"""
1259 return self.priority
1259 return self.priority
1260
1260
1261 def getWeight(self):
1261 def getWeight(self):
1262 """Weight accessor"""
1262 """Weight accessor"""
1263 return self.weight
1263 return self.weight
1264
1264
1265 def getProperties(self):
1265 def getProperties(self):
1266 """Properties accessor"""
1266 """Properties accessor"""
1267 return self.properties
1267 return self.properties
1268
1268
1269 def getText(self):
1269 def getText(self):
1270 """Text accessor"""
1270 """Text accessor"""
1271 return self.text
1271 return self.text
1272
1272
1273 def getServer(self):
1273 def getServer(self):
1274 """Server accessor"""
1274 """Server accessor"""
1275 return self.server
1275 return self.server
1276
1276
1277 def updateRecord(self, zeroconf, now, record):
1277 def updateRecord(self, zeroconf, now, record):
1278 """Updates service information from a DNS record"""
1278 """Updates service information from a DNS record"""
1279 if record is not None and not record.isExpired(now):
1279 if record is not None and not record.isExpired(now):
1280 if record.type == _TYPE_A:
1280 if record.type == _TYPE_A:
1281 # if record.name == self.name:
1281 # if record.name == self.name:
1282 if record.name == self.server:
1282 if record.name == self.server:
1283 self.address = record.address
1283 self.address = record.address
1284 elif record.type == _TYPE_SRV:
1284 elif record.type == _TYPE_SRV:
1285 if record.name == self.name:
1285 if record.name == self.name:
1286 self.server = record.server
1286 self.server = record.server
1287 self.port = record.port
1287 self.port = record.port
1288 self.weight = record.weight
1288 self.weight = record.weight
1289 self.priority = record.priority
1289 self.priority = record.priority
1290 # self.address = None
1290 # self.address = None
1291 self.updateRecord(
1291 self.updateRecord(
1292 zeroconf,
1292 zeroconf,
1293 now,
1293 now,
1294 zeroconf.cache.getByDetails(
1294 zeroconf.cache.getByDetails(
1295 self.server, _TYPE_A, _CLASS_IN
1295 self.server, _TYPE_A, _CLASS_IN
1296 ),
1296 ),
1297 )
1297 )
1298 elif record.type == _TYPE_TXT:
1298 elif record.type == _TYPE_TXT:
1299 if record.name == self.name:
1299 if record.name == self.name:
1300 self.setText(record.text)
1300 self.setText(record.text)
1301
1301
1302 def request(self, zeroconf, timeout):
1302 def request(self, zeroconf, timeout):
1303 """Returns true if the service could be discovered on the
1303 """Returns true if the service could be discovered on the
1304 network, and updates this object with details discovered.
1304 network, and updates this object with details discovered.
1305 """
1305 """
1306 now = currentTimeMillis()
1306 now = currentTimeMillis()
1307 delay = _LISTENER_TIME
1307 delay = _LISTENER_TIME
1308 nexttime = now + delay
1308 nexttime = now + delay
1309 last = now + timeout
1309 last = now + timeout
1310 result = False
1310 result = False
1311 try:
1311 try:
1312 zeroconf.addListener(
1312 zeroconf.addListener(
1313 self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)
1313 self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)
1314 )
1314 )
1315 while (
1315 while (
1316 self.server is None or self.address is None or self.text is None
1316 self.server is None or self.address is None or self.text is None
1317 ):
1317 ):
1318 if last <= now:
1318 if last <= now:
1319 return False
1319 return False
1320 if nexttime <= now:
1320 if nexttime <= now:
1321 out = DNSOutgoing(_FLAGS_QR_QUERY)
1321 out = DNSOutgoing(_FLAGS_QR_QUERY)
1322 out.addQuestion(
1322 out.addQuestion(
1323 DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)
1323 DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)
1324 )
1324 )
1325 out.addAnswerAtTime(
1325 out.addAnswerAtTime(
1326 zeroconf.cache.getByDetails(
1326 zeroconf.cache.getByDetails(
1327 self.name, _TYPE_SRV, _CLASS_IN
1327 self.name, _TYPE_SRV, _CLASS_IN
1328 ),
1328 ),
1329 now,
1329 now,
1330 )
1330 )
1331 out.addQuestion(
1331 out.addQuestion(
1332 DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)
1332 DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)
1333 )
1333 )
1334 out.addAnswerAtTime(
1334 out.addAnswerAtTime(
1335 zeroconf.cache.getByDetails(
1335 zeroconf.cache.getByDetails(
1336 self.name, _TYPE_TXT, _CLASS_IN
1336 self.name, _TYPE_TXT, _CLASS_IN
1337 ),
1337 ),
1338 now,
1338 now,
1339 )
1339 )
1340 if self.server is not None:
1340 if self.server is not None:
1341 out.addQuestion(
1341 out.addQuestion(
1342 DNSQuestion(self.server, _TYPE_A, _CLASS_IN)
1342 DNSQuestion(self.server, _TYPE_A, _CLASS_IN)
1343 )
1343 )
1344 out.addAnswerAtTime(
1344 out.addAnswerAtTime(
1345 zeroconf.cache.getByDetails(
1345 zeroconf.cache.getByDetails(
1346 self.server, _TYPE_A, _CLASS_IN
1346 self.server, _TYPE_A, _CLASS_IN
1347 ),
1347 ),
1348 now,
1348 now,
1349 )
1349 )
1350 zeroconf.send(out)
1350 zeroconf.send(out)
1351 nexttime = now + delay
1351 nexttime = now + delay
1352 delay = delay * 2
1352 delay = delay * 2
1353
1353
1354 zeroconf.wait(min(nexttime, last) - now)
1354 zeroconf.wait(min(nexttime, last) - now)
1355 now = currentTimeMillis()
1355 now = currentTimeMillis()
1356 result = True
1356 result = True
1357 finally:
1357 finally:
1358 zeroconf.removeListener(self)
1358 zeroconf.removeListener(self)
1359
1359
1360 return result
1360 return result
1361
1361
1362 def __eq__(self, other):
1362 def __eq__(self, other):
1363 """Tests equality of service name"""
1363 """Tests equality of service name"""
1364 if isinstance(other, ServiceInfo):
1364 if isinstance(other, ServiceInfo):
1365 return other.name == self.name
1365 return other.name == self.name
1366 return False
1366 return False
1367
1367
1368 def __ne__(self, other):
1368 def __ne__(self, other):
1369 """Non-equality test"""
1369 """Non-equality test"""
1370 return not self.__eq__(other)
1370 return not self.__eq__(other)
1371
1371
1372 def __repr__(self):
1372 def __repr__(self):
1373 """String representation"""
1373 """String representation"""
1374 result = b"service[%s,%s:%d," % (
1374 result = b"service[%s,%s:%d," % (
1375 self.name,
1375 self.name,
1376 pycompat.sysbytes(socket.inet_ntoa(self.getAddress())),
1376 pycompat.sysbytes(socket.inet_ntoa(self.getAddress())),
1377 self.port,
1377 self.port,
1378 )
1378 )
1379 if self.text is None:
1379 if self.text is None:
1380 result += b"None"
1380 result += b"None"
1381 else:
1381 else:
1382 if len(self.text) < 20:
1382 if len(self.text) < 20:
1383 result += self.text
1383 result += self.text
1384 else:
1384 else:
1385 result += self.text[:17] + b"..."
1385 result += self.text[:17] + b"..."
1386 result += b"]"
1386 result += b"]"
1387 return result
1387 return result
1388
1388
1389
1389
1390 class Zeroconf:
1390 class Zeroconf:
1391 """Implementation of Zeroconf Multicast DNS Service Discovery
1391 """Implementation of Zeroconf Multicast DNS Service Discovery
1392
1392
1393 Supports registration, unregistration, queries and browsing.
1393 Supports registration, unregistration, queries and browsing.
1394 """
1394 """
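# Illustrative sketch (added for clarity; the service details are made up):
# the typical lifetime of a Zeroconf instance.
#
#     zc = Zeroconf()
#     info = ServiceInfo(
#         b"_hg._tcp.local.",
#         b"repo._hg._tcp.local.",
#         socket.inet_aton("192.168.0.2"),
#         8000,
#         properties={b'path': b'/repo'},
#     )
#     zc.registerService(info)    # announced three times, see below
#     # ... serve until shutdown ...
#     zc.unregisterService(info)
#     zc.close()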
1395
1395
1396 def __init__(self, bindaddress=None):
1396 def __init__(self, bindaddress=None):
1397 """Creates an instance of the Zeroconf class, establishing
1397 """Creates an instance of the Zeroconf class, establishing
1398 multicast communications and the listening and reaping threads."""
1398 multicast communications and the listening and reaping threads."""
1399 globals()[b'_GLOBAL_DONE'] = 0
1399 globals()[b'_GLOBAL_DONE'] = 0
1400 if bindaddress is None:
1400 if bindaddress is None:
1401 self.intf = socket.gethostbyname(socket.gethostname())
1401 self.intf = socket.gethostbyname(socket.gethostname())
1402 else:
1402 else:
1403 self.intf = bindaddress
1403 self.intf = bindaddress
1404 self.group = (b'', _MDNS_PORT)
1404 self.group = (b'', _MDNS_PORT)
1405 self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
1405 self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
1406 try:
1406 try:
1407 self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
1407 self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
1408 self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
1408 self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
1409 except Exception:
1409 except Exception:
1410 # SO_REUSEADDR should be equivalent to SO_REUSEPORT for
1410 # SO_REUSEADDR should be equivalent to SO_REUSEPORT for
1411 # multicast UDP sockets (p 731, "TCP/IP Illustrated,
1411 # multicast UDP sockets (p 731, "TCP/IP Illustrated,
1412 # Volume 2"), but some BSD-derived systems require
1412 # Volume 2"), but some BSD-derived systems require
1413 # SO_REUSEPORT to be specified explicitly. Also, not all
1413 # SO_REUSEPORT to be specified explicitly. Also, not all
1414 # versions of Python have SO_REUSEPORT available. So
1414 # versions of Python have SO_REUSEPORT available. So
1415 # if you're on a BSD-based system, and haven't upgraded
1415 # if you're on a BSD-based system, and haven't upgraded
1416 # to Python 2.3 yet, you may find this library doesn't
1416 # to Python 2.3 yet, you may find this library doesn't
1417 # work as expected.
1417 # work as expected.
1418 #
1418 #
1419 pass
1419 pass
1420 self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, b"\xff")
1420 self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, b"\xff")
1421 self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, b"\x01")
1421 self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, b"\x01")
1422 try:
1422 try:
1423 self.socket.bind(self.group)
1423 self.socket.bind(self.group)
1424 except Exception:
1424 except Exception:
1425 # Some versions of Linux raise an exception even though
1425 # Some versions of Linux raise an exception even though
1426 # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
1426 # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
1427 pass
1427 pass
1428 self.socket.setsockopt(
1428 self.socket.setsockopt(
1429 socket.SOL_IP,
1429 socket.SOL_IP,
1430 socket.IP_ADD_MEMBERSHIP,
1430 socket.IP_ADD_MEMBERSHIP,
1431 socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'),
1431 socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'),
1432 )
1432 )
1433
1433
1434 self.listeners = []
1434 self.listeners = []
1435 self.browsers = []
1435 self.browsers = []
1436 self.services = {}
1436 self.services = {}
1437 self.servicetypes = {}
1437 self.servicetypes = {}
1438
1438
1439 self.cache = DNSCache()
1439 self.cache = DNSCache()
1440
1440
1441 self.condition = threading.Condition()
1441 self.condition = threading.Condition()
1442
1442
1443 self.engine = Engine(self)
1443 self.engine = Engine(self)
1444 self.listener = Listener(self)
1444 self.listener = Listener(self)
1445 self.reaper = Reaper(self)
1445 self.reaper = Reaper(self)
1446
1446
1447 def isLoopback(self):
1447 def isLoopback(self):
1448 return self.intf.startswith(b"127.0.0.1")
1448 return self.intf.startswith(b"127.0.0.1")
1449
1449
1450 def isLinklocal(self):
1450 def isLinklocal(self):
1451 return self.intf.startswith(b"169.254.")
1451 return self.intf.startswith(b"169.254.")
1452
1452
1453 def wait(self, timeout):
1453 def wait(self, timeout):
1454 """Calling thread waits for a given number of milliseconds or
1454 """Calling thread waits for a given number of milliseconds or
1455 until notified."""
1455 until notified."""
1456 self.condition.acquire()
1456 self.condition.acquire()
1457 self.condition.wait(timeout / 1000)
1457 self.condition.wait(timeout / 1000)
1458 self.condition.release()
1458 self.condition.release()
1459
1459
1460 def notifyAll(self):
1460 def notifyAll(self):
1461 """Notifies all waiting threads"""
1461 """Notifies all waiting threads"""
1462 self.condition.acquire()
1462 self.condition.acquire()
1463 self.condition.notify_all()
1463 self.condition.notify_all()
1464 self.condition.release()
1464 self.condition.release()
1465
1465
1466 def getServiceInfo(self, type, name, timeout=3000):
1466 def getServiceInfo(self, type, name, timeout=3000):
1467 """Returns the network's service information for a particular
1467 """Returns the network's service information for a particular
1468 name and type, or None if no service matches by the timeout,
1468 name and type, or None if no service matches by the timeout,
1469 which defaults to 3 seconds."""
1469 which defaults to 3 seconds."""
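# Usage sketch (added for clarity; names are hypothetical):
#
#     info = zc.getServiceInfo(b"_http._tcp.local.",
#                              b"My Service Name._http._tcp.local.")
#     if info is not None:
#         print(socket.inet_ntoa(info.getAddress()), info.getPort())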
1470 info = ServiceInfo(type, name)
1470 info = ServiceInfo(type, name)
1471 if info.request(self, timeout):
1471 if info.request(self, timeout):
1472 return info
1472 return info
1473 return None
1473 return None
1474
1474
1475 def addServiceListener(self, type, listener):
1475 def addServiceListener(self, type, listener):
1476 """Adds a listener for a particular service type. This object
1476 """Adds a listener for a particular service type. This object
1477 will then have its updateRecord method called when information
1477 will then have its updateRecord method called when information
1478 arrives for that type."""
1478 arrives for that type."""
1479 self.removeServiceListener(listener)
1479 self.removeServiceListener(listener)
1480 self.browsers.append(ServiceBrowser(self, type, listener))
1480 self.browsers.append(ServiceBrowser(self, type, listener))
1481
1481
1482 def removeServiceListener(self, listener):
1482 def removeServiceListener(self, listener):
1483 """Removes a listener from the set that is currently listening."""
1483 """Removes a listener from the set that is currently listening."""
1484 for browser in self.browsers[:]:
1484 for browser in self.browsers[:]:
1485 if browser.listener == listener:
1485 if browser.listener == listener:
1486 browser.cancel()
1486 browser.cancel()
1487 self.browsers.remove(browser)
1487 self.browsers.remove(browser)
1488
1488
1489 def registerService(self, info, ttl=_DNS_TTL):
1489 def registerService(self, info, ttl=_DNS_TTL):
1490 """Registers service information to the network with a default TTL
1490 """Registers service information to the network with a default TTL
1491 of 60 seconds. Zeroconf will then respond to requests for
1491 of 60 seconds. Zeroconf will then respond to requests for
1492 information for that service. The name of the service may be
1492 information for that service. The name of the service may be
1493 changed if needed to make it unique on the network."""
1493 changed if needed to make it unique on the network."""
1494 self.checkService(info)
1494 self.checkService(info)
1495 self.services[info.name.lower()] = info
1495 self.services[info.name.lower()] = info
1496 if info.type in self.servicetypes:
1496 if info.type in self.servicetypes:
1497 self.servicetypes[info.type] += 1
1497 self.servicetypes[info.type] += 1
1498 else:
1498 else:
1499 self.servicetypes[info.type] = 1
1499 self.servicetypes[info.type] = 1
1500 now = currentTimeMillis()
1500 now = currentTimeMillis()
1501 nexttime = now
1501 nexttime = now
1502 i = 0
1502 i = 0
1503 while i < 3:
1503 while i < 3:
1504 if now < nexttime:
1504 if now < nexttime:
1505 self.wait(nexttime - now)
1505 self.wait(nexttime - now)
1506 now = currentTimeMillis()
1506 now = currentTimeMillis()
1507 continue
1507 continue
1508 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1508 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1509 out.addAnswerAtTime(
1509 out.addAnswerAtTime(
1510 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0
1510 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0
1511 )
1511 )
1512 out.addAnswerAtTime(
1512 out.addAnswerAtTime(
1513 DNSService(
1513 DNSService(
1514 info.name,
1514 info.name,
1515 _TYPE_SRV,
1515 _TYPE_SRV,
1516 _CLASS_IN,
1516 _CLASS_IN,
1517 ttl,
1517 ttl,
1518 info.priority,
1518 info.priority,
1519 info.weight,
1519 info.weight,
1520 info.port,
1520 info.port,
1521 info.server,
1521 info.server,
1522 ),
1522 ),
1523 0,
1523 0,
1524 )
1524 )
1525 out.addAnswerAtTime(
1525 out.addAnswerAtTime(
1526 DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0
1526 DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0
1527 )
1527 )
1528 if info.address:
1528 if info.address:
1529 out.addAnswerAtTime(
1529 out.addAnswerAtTime(
1530 DNSAddress(
1530 DNSAddress(
1531 info.server, _TYPE_A, _CLASS_IN, ttl, info.address
1531 info.server, _TYPE_A, _CLASS_IN, ttl, info.address
1532 ),
1532 ),
1533 0,
1533 0,
1534 )
1534 )
1535 self.send(out)
1535 self.send(out)
1536 i += 1
1536 i += 1
1537 nexttime += _REGISTER_TIME
1537 nexttime += _REGISTER_TIME
1538
1538
1539 def unregisterService(self, info):
1539 def unregisterService(self, info):
1540 """Unregister a service."""
1540 """Unregister a service."""
1541 try:
1541 try:
1542 del self.services[info.name.lower()]
1542 del self.services[info.name.lower()]
1543 if self.servicetypes[info.type] > 1:
1543 if self.servicetypes[info.type] > 1:
1544 self.servicetypes[info.type] -= 1
1544 self.servicetypes[info.type] -= 1
1545 else:
1545 else:
1546 del self.servicetypes[info.type]
1546 del self.servicetypes[info.type]
1547 except KeyError:
1547 except KeyError:
1548 pass
1548 pass
1549 now = currentTimeMillis()
1549 now = currentTimeMillis()
1550 nexttime = now
1550 nexttime = now
1551 i = 0
1551 i = 0
1552 while i < 3:
1552 while i < 3:
1553 if now < nexttime:
1553 if now < nexttime:
1554 self.wait(nexttime - now)
1554 self.wait(nexttime - now)
1555 now = currentTimeMillis()
1555 now = currentTimeMillis()
1556 continue
1556 continue
1557 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1557 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1558 out.addAnswerAtTime(
1558 out.addAnswerAtTime(
1559 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0
1559 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0
1560 )
1560 )
1561 out.addAnswerAtTime(
1561 out.addAnswerAtTime(
1562 DNSService(
1562 DNSService(
1563 info.name,
1563 info.name,
1564 _TYPE_SRV,
1564 _TYPE_SRV,
1565 _CLASS_IN,
1565 _CLASS_IN,
1566 0,
1566 0,
1567 info.priority,
1567 info.priority,
1568 info.weight,
1568 info.weight,
1569 info.port,
1569 info.port,
1570 info.name,
1570 info.name,
1571 ),
1571 ),
1572 0,
1572 0,
1573 )
1573 )
1574 out.addAnswerAtTime(
1574 out.addAnswerAtTime(
1575 DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0
1575 DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0
1576 )
1576 )
1577 if info.address:
1577 if info.address:
1578 out.addAnswerAtTime(
1578 out.addAnswerAtTime(
1579 DNSAddress(
1579 DNSAddress(
1580 info.server, _TYPE_A, _CLASS_IN, 0, info.address
1580 info.server, _TYPE_A, _CLASS_IN, 0, info.address
1581 ),
1581 ),
1582 0,
1582 0,
1583 )
1583 )
1584 self.send(out)
1584 self.send(out)
1585 i += 1
1585 i += 1
1586 nexttime += _UNREGISTER_TIME
1586 nexttime += _UNREGISTER_TIME
1587
1587
1588 def unregisterAllServices(self):
1588 def unregisterAllServices(self):
1589 """Unregister all registered services."""
1589 """Unregister all registered services."""
1590 if len(self.services) > 0:
1590 if len(self.services) > 0:
1591 now = currentTimeMillis()
1591 now = currentTimeMillis()
1592 nexttime = now
1592 nexttime = now
1593 i = 0
1593 i = 0
1594 while i < 3:
1594 while i < 3:
1595 if now < nexttime:
1595 if now < nexttime:
1596 self.wait(nexttime - now)
1596 self.wait(nexttime - now)
1597 now = currentTimeMillis()
1597 now = currentTimeMillis()
1598 continue
1598 continue
1599 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1599 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1600 for info in self.services.values():
1600 for info in self.services.values():
1601 out.addAnswerAtTime(
1601 out.addAnswerAtTime(
1602 DNSPointer(
1602 DNSPointer(
1603 info.type, _TYPE_PTR, _CLASS_IN, 0, info.name
1603 info.type, _TYPE_PTR, _CLASS_IN, 0, info.name
1604 ),
1604 ),
1605 0,
1605 0,
1606 )
1606 )
1607 out.addAnswerAtTime(
1607 out.addAnswerAtTime(
1608 DNSService(
1608 DNSService(
1609 info.name,
1609 info.name,
1610 _TYPE_SRV,
1610 _TYPE_SRV,
1611 _CLASS_IN,
1611 _CLASS_IN,
1612 0,
1612 0,
1613 info.priority,
1613 info.priority,
1614 info.weight,
1614 info.weight,
1615 info.port,
1615 info.port,
1616 info.server,
1616 info.server,
1617 ),
1617 ),
1618 0,
1618 0,
1619 )
1619 )
1620 out.addAnswerAtTime(
1620 out.addAnswerAtTime(
1621 DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text),
1621 DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text),
1622 0,
1622 0,
1623 )
1623 )
1624 if info.address:
1624 if info.address:
1625 out.addAnswerAtTime(
1625 out.addAnswerAtTime(
1626 DNSAddress(
1626 DNSAddress(
1627 info.server, _TYPE_A, _CLASS_IN, 0, info.address
1627 info.server, _TYPE_A, _CLASS_IN, 0, info.address
1628 ),
1628 ),
1629 0,
1629 0,
1630 )
1630 )
1631 self.send(out)
1631 self.send(out)
1632 i += 1
1632 i += 1
1633 nexttime += _UNREGISTER_TIME
1633 nexttime += _UNREGISTER_TIME
1634
1634
1635 def checkService(self, info):
1635 def checkService(self, info):
1636 """Checks the network for a unique service name, modifying the
1636 """Checks the network for a unique service name, modifying the
1637 ServiceInfo passed in if it is not unique."""
1637 ServiceInfo passed in if it is not unique."""
1638 now = currentTimeMillis()
1638 now = currentTimeMillis()
1639 nexttime = now
1639 nexttime = now
1640 i = 0
1640 i = 0
1641 while i < 3:
1641 while i < 3:
1642 for record in self.cache.entriesWithName(info.type):
1642 for record in self.cache.entriesWithName(info.type):
1643 if (
1643 if (
1644 record.type == _TYPE_PTR
1644 record.type == _TYPE_PTR
1645 and not record.isExpired(now)
1645 and not record.isExpired(now)
1646 and record.alias == info.name
1646 and record.alias == info.name
1647 ):
1647 ):
1648 if info.name.find(b'.') < 0:
1648 if info.name.find(b'.') < 0:
1649 info.name = b"%s.[%s:%d].%s" % (
1649 info.name = b"%s.[%s:%d].%s" % (
1650 info.name,
1650 info.name,
1651 info.address,
1651 info.address,
1652 info.port,
1652 info.port,
1653 info.type,
1653 info.type,
1654 )
1654 )
1655 self.checkService(info)
1655 self.checkService(info)
1656 return
1656 return
1657 raise NonUniqueNameException
1657 raise NonUniqueNameException
1658 if now < nexttime:
1658 if now < nexttime:
1659 self.wait(nexttime - now)
1659 self.wait(nexttime - now)
1660 now = currentTimeMillis()
1660 now = currentTimeMillis()
1661 continue
1661 continue
1662 out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
1662 out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
1663 self.debug = out
1663 self.debug = out
1664 out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
1664 out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
1665 out.addAuthoritativeAnswer(
1665 out.addAuthoritativeAnswer(
1666 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)
1666 DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)
1667 )
1667 )
1668 self.send(out)
1668 self.send(out)
1669 i += 1
1669 i += 1
1670 nexttime += _CHECK_TIME
1670 nexttime += _CHECK_TIME
1671
1671
1672 def addListener(self, listener, question):
1672 def addListener(self, listener, question):
1673 """Adds a listener for a given question. The listener will have
1673 """Adds a listener for a given question. The listener will have
1674 its updateRecord method called when information is available to
1674 its updateRecord method called when information is available to
1675 answer the question."""
1675 answer the question."""
1676 now = currentTimeMillis()
1676 now = currentTimeMillis()
1677 self.listeners.append(listener)
1677 self.listeners.append(listener)
1678 if question is not None:
1678 if question is not None:
1679 for record in self.cache.entriesWithName(question.name):
1679 for record in self.cache.entriesWithName(question.name):
1680 if question.answeredBy(record) and not record.isExpired(now):
1680 if question.answeredBy(record) and not record.isExpired(now):
1681 listener.updateRecord(self, now, record)
1681 listener.updateRecord(self, now, record)
1682 self.notifyAll()
1682 self.notifyAll()
1683
1683
1684 def removeListener(self, listener):
1684 def removeListener(self, listener):
1685 """Removes a listener."""
1685 """Removes a listener."""
1686 try:
1686 try:
1687 self.listeners.remove(listener)
1687 self.listeners.remove(listener)
1688 self.notifyAll()
1688 self.notifyAll()
1689 except Exception:
1689 except Exception:
1690 pass
1690 pass
1691
1691
1692 def updateRecord(self, now, rec):
1692 def updateRecord(self, now, rec):
1693 """Used to notify listeners of new information that has updated
1693 """Used to notify listeners of new information that has updated
1694 a record."""
1694 a record."""
1695 for listener in self.listeners:
1695 for listener in self.listeners:
1696 listener.updateRecord(self, now, rec)
1696 listener.updateRecord(self, now, rec)
1697 self.notifyAll()
1697 self.notifyAll()
1698
1698
1699 def handleResponse(self, msg):
1699 def handleResponse(self, msg):
1700 """Deal with incoming response packets. All answers
1700 """Deal with incoming response packets. All answers
1701 are held in the cache, and listeners are notified."""
1701 are held in the cache, and listeners are notified."""
1702 now = currentTimeMillis()
1702 now = currentTimeMillis()
1703 for record in msg.answers:
1703 for record in msg.answers:
1704 expired = record.isExpired(now)
1704 expired = record.isExpired(now)
1705 if record in self.cache.entries():
1705 if record in self.cache.entries():
1706 if expired:
1706 if expired:
1707 self.cache.remove(record)
1707 self.cache.remove(record)
1708 else:
1708 else:
1709 entry = self.cache.get(record)
1709 entry = self.cache.get(record)
1710 if entry is not None:
1710 if entry is not None:
1711 entry.resetTTL(record)
1711 entry.resetTTL(record)
1712 record = entry
1712 record = entry
1713 else:
1713 else:
1714 self.cache.add(record)
1714 self.cache.add(record)
1715
1715
1716 self.updateRecord(now, record)
1716 self.updateRecord(now, record)
1717
1717
1718 def handleQuery(self, msg, addr, port):
1718 def handleQuery(self, msg, addr, port):
1719 """Deal with incoming query packets. Provides a response if
1719 """Deal with incoming query packets. Provides a response if
1720 possible."""
1720 possible."""
1721 out = None
1721 out = None
1722
1722
1723 # Support unicast client responses
1723 # Support unicast client responses
1724 #
1724 #
1725 if port != _MDNS_PORT:
1725 if port != _MDNS_PORT:
1726 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0)
1726 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0)
1727 for question in msg.questions:
1727 for question in msg.questions:
1728 out.addQuestion(question)
1728 out.addQuestion(question)
1729
1729
1730 for question in msg.questions:
1730 for question in msg.questions:
1731 if question.type == _TYPE_PTR:
1731 if question.type == _TYPE_PTR:
1732 if question.name == b"_services._dns-sd._udp.local.":
1732 if question.name == b"_services._dns-sd._udp.local.":
1733 for stype in self.servicetypes.keys():
1733 for stype in self.servicetypes.keys():
1734 if out is None:
1734 if out is None:
1735 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1735 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1736 out.addAnswer(
1736 out.addAnswer(
1737 msg,
1737 msg,
1738 DNSPointer(
1738 DNSPointer(
1739 b"_services._dns-sd._udp.local.",
1739 b"_services._dns-sd._udp.local.",
1740 _TYPE_PTR,
1740 _TYPE_PTR,
1741 _CLASS_IN,
1741 _CLASS_IN,
1742 _DNS_TTL,
1742 _DNS_TTL,
1743 stype,
1743 stype,
1744 ),
1744 ),
1745 )
1745 )
1746 for service in self.services.values():
1746 for service in self.services.values():
1747 if question.name == service.type:
1747 if question.name == service.type:
1748 if out is None:
1748 if out is None:
1749 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1749 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1750 out.addAnswer(
1750 out.addAnswer(
1751 msg,
1751 msg,
1752 DNSPointer(
1752 DNSPointer(
1753 service.type,
1753 service.type,
1754 _TYPE_PTR,
1754 _TYPE_PTR,
1755 _CLASS_IN,
1755 _CLASS_IN,
1756 _DNS_TTL,
1756 _DNS_TTL,
1757 service.name,
1757 service.name,
1758 ),
1758 ),
1759 )
1759 )
1760 else:
1760 else:
1761 try:
1761 try:
1762 if out is None:
1762 if out is None:
1763 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1763 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
1764
1764
1765 # Answer A record queries for any service addresses we know
1765 # Answer A record queries for any service addresses we know
1766 if question.type == _TYPE_A or question.type == _TYPE_ANY:
1766 if question.type == _TYPE_A or question.type == _TYPE_ANY:
1767 for service in self.services.values():
1767 for service in self.services.values():
1768 if service.server == question.name.lower():
1768 if service.server == question.name.lower():
1769 out.addAnswer(
1769 out.addAnswer(
1770 msg,
1770 msg,
1771 DNSAddress(
1771 DNSAddress(
1772 question.name,
1772 question.name,
1773 _TYPE_A,
1773 _TYPE_A,
1774 _CLASS_IN | _CLASS_UNIQUE,
1774 _CLASS_IN | _CLASS_UNIQUE,
1775 _DNS_TTL,
1775 _DNS_TTL,
1776 service.address,
1776 service.address,
1777 ),
1777 ),
1778 )
1778 )
1779
1779
1780 service = self.services.get(question.name.lower(), None)
1780 service = self.services.get(question.name.lower(), None)
1781 if not service:
1781 if not service:
1782 continue
1782 continue
1783
1783
1784 if question.type == _TYPE_SRV or question.type == _TYPE_ANY:
1784 if question.type == _TYPE_SRV or question.type == _TYPE_ANY:
1785 out.addAnswer(
1785 out.addAnswer(
1786 msg,
1786 msg,
1787 DNSService(
1787 DNSService(
1788 question.name,
1788 question.name,
1789 _TYPE_SRV,
1789 _TYPE_SRV,
1790 _CLASS_IN | _CLASS_UNIQUE,
1790 _CLASS_IN | _CLASS_UNIQUE,
1791 _DNS_TTL,
1791 _DNS_TTL,
1792 service.priority,
1792 service.priority,
1793 service.weight,
1793 service.weight,
1794 service.port,
1794 service.port,
1795 service.server,
1795 service.server,
1796 ),
1796 ),
1797 )
1797 )
1798 if question.type == _TYPE_TXT or question.type == _TYPE_ANY:
1798 if question.type == _TYPE_TXT or question.type == _TYPE_ANY:
1799 out.addAnswer(
1799 out.addAnswer(
1800 msg,
1800 msg,
1801 DNSText(
1801 DNSText(
1802 question.name,
1802 question.name,
1803 _TYPE_TXT,
1803 _TYPE_TXT,
1804 _CLASS_IN | _CLASS_UNIQUE,
1804 _CLASS_IN | _CLASS_UNIQUE,
1805 _DNS_TTL,
1805 _DNS_TTL,
1806 service.text,
1806 service.text,
1807 ),
1807 ),
1808 )
1808 )
1809 if question.type == _TYPE_SRV:
1809 if question.type == _TYPE_SRV:
1810 out.addAdditionalAnswer(
1810 out.addAdditionalAnswer(
1811 DNSAddress(
1811 DNSAddress(
1812 service.server,
1812 service.server,
1813 _TYPE_A,
1813 _TYPE_A,
1814 _CLASS_IN | _CLASS_UNIQUE,
1814 _CLASS_IN | _CLASS_UNIQUE,
1815 _DNS_TTL,
1815 _DNS_TTL,
1816 service.address,
1816 service.address,
1817 )
1817 )
1818 )
1818 )
1819 except Exception:
1819 except Exception:
1820 traceback.print_exc()
1820 traceback.print_exc()
1821
1821
1822 if out is not None and out.answers:
1822 if out is not None and out.answers:
1823 out.id = msg.id
1823 out.id = msg.id
1824 self.send(out, addr, port)
1824 self.send(out, addr, port)
1825
1825
1826 def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
1826 def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
1827 """Sends an outgoing packet."""
1827 """Sends an outgoing packet."""
1828 # This is a quick test to see if we can parse the packets we generate
1828 # This is a quick test to see if we can parse the packets we generate
1829 # temp = DNSIncoming(out.packet())
1829 # temp = DNSIncoming(out.packet())
1830 try:
1830 try:
1831 self.socket.sendto(out.packet(), 0, (addr, port))
1831 self.socket.sendto(out.packet(), 0, (addr, port))
1832 except Exception:
1832 except Exception:
1833 # Ignore this, it may be a temporary loss of network connection
1833 # Ignore this, it may be a temporary loss of network connection
1834 pass
1834 pass
1835
1835
1836 def close(self):
1836 def close(self):
1837 """Ends the background threads, and prevents this instance from
1837 """Ends the background threads, and prevents this instance from
1838 servicing further queries."""
1838 servicing further queries."""
1839 if globals()[b'_GLOBAL_DONE'] == 0:
1839 if globals()[b'_GLOBAL_DONE'] == 0:
1840 globals()[b'_GLOBAL_DONE'] = 1
1840 globals()[b'_GLOBAL_DONE'] = 1
1841 self.notifyAll()
1841 self.notifyAll()
1842 self.engine.notify()
1842 self.engine.notify()
1843 self.unregisterAllServices()
1843 self.unregisterAllServices()
1844 self.socket.setsockopt(
1844 self.socket.setsockopt(
1845 socket.SOL_IP,
1845 socket.SOL_IP,
1846 socket.IP_DROP_MEMBERSHIP,
1846 socket.IP_DROP_MEMBERSHIP,
1847 socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'),
1847 socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'),
1848 )
1848 )
1849 self.socket.close()
1849 self.socket.close()
1850
1850
1851
1851
1852 # Test a few module features, including service registration, service
1852 # Test a few module features, including service registration, service
1853 # query (for Zoe), and service unregistration.
1853 # query (for Zoe), and service unregistration.
1854
1854
1855 if __name__ == '__main__':
1855 if __name__ == '__main__':
1856 print(b"Multicast DNS Service Discovery for Python, version", __version__)
1856 print(b"Multicast DNS Service Discovery for Python, version", __version__)
1857 r = Zeroconf()
1857 r = Zeroconf()
1858 print(b"1. Testing registration of a service...")
1858 print(b"1. Testing registration of a service...")
1859 desc = {b'version': b'0.10', b'a': b'test value', b'b': b'another value'}
1859 desc = {b'version': b'0.10', b'a': b'test value', b'b': b'another value'}
1860 info = ServiceInfo(
1860 info = ServiceInfo(
1861 b"_http._tcp.local.",
1861 b"_http._tcp.local.",
1862 b"My Service Name._http._tcp.local.",
1862 b"My Service Name._http._tcp.local.",
1863 socket.inet_aton("127.0.0.1"),
1863 socket.inet_aton("127.0.0.1"),
1864 1234,
1864 1234,
1865 0,
1865 0,
1866 0,
1866 0,
1867 desc,
1867 desc,
1868 )
1868 )
1869 print(b" Registering service...")
1869 print(b" Registering service...")
1870 r.registerService(info)
1870 r.registerService(info)
1871 print(b" Registration done.")
1871 print(b" Registration done.")
1872 print(b"2. Testing query of service information...")
1872 print(b"2. Testing query of service information...")
1873 print(
1873 print(
1874 b" Getting ZOE service:",
1874 b" Getting ZOE service:",
1875 str(r.getServiceInfo(b"_http._tcp.local.", b"ZOE._http._tcp.local.")),
1875 str(r.getServiceInfo(b"_http._tcp.local.", b"ZOE._http._tcp.local.")),
1876 )
1876 )
1877 print(b" Query done.")
1877 print(b" Query done.")
1878 print(b"3. Testing query of own service...")
1878 print(b"3. Testing query of own service...")
1879 print(
1879 print(
1880 b" Getting self:",
1880 b" Getting self:",
1881 str(
1881 str(
1882 r.getServiceInfo(
1882 r.getServiceInfo(
1883 b"_http._tcp.local.", b"My Service Name._http._tcp.local."
1883 b"_http._tcp.local.", b"My Service Name._http._tcp.local."
1884 )
1884 )
1885 ),
1885 ),
1886 )
1886 )
1887 print(b" Query done.")
1887 print(b" Query done.")
1888 print(b"4. Testing unregister of service information...")
1888 print(b"4. Testing unregister of service information...")
1889 r.unregisterService(info)
1889 r.unregisterService(info)
1890 print(b" Unregister done.")
1890 print(b" Unregister done.")
1891 r.close()
1891 r.close()
@@ -1,88 +1,88
1 # bdiff.py - CFFI implementation of bdiff.c
1 # bdiff.py - CFFI implementation of bdiff.c
2 #
2 #
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import struct
9 import struct
10
10
11 from typing import (
11 from typing import (
12 List,
12 List,
13 Tuple,
13 Tuple,
14 )
14 )
15
15
16 from ..pure.bdiff import *
16 from ..pure.bdiff import *
17 from . import _bdiff # pytype: disable=import-error
17 from . import _bdiff # pytype: disable=import-error
18
18
19 ffi = _bdiff.ffi
19 ffi = _bdiff.ffi
20 lib = _bdiff.lib
20 lib = _bdiff.lib
21
21
22
22
23 def blocks(sa: bytes, sb: bytes) -> List[Tuple[int, int, int, int]]:
23 def blocks(sa: bytes, sb: bytes) -> List[Tuple[int, int, int, int]]:
24 a = ffi.new(b"struct bdiff_line**")
24 a = ffi.new("struct bdiff_line**")
25 b = ffi.new(b"struct bdiff_line**")
25 b = ffi.new("struct bdiff_line**")
26 ac = ffi.new(b"char[]", str(sa))
26 ac = ffi.new("char[]", bytes(sa))
27 bc = ffi.new(b"char[]", str(sb))
27 bc = ffi.new("char[]", bytes(sb))
28 l = ffi.new(b"struct bdiff_hunk*")
28 l = ffi.new("struct bdiff_hunk*")
29 try:
29 try:
30 an = lib.bdiff_splitlines(ac, len(sa), a)
30 an = lib.bdiff_splitlines(ac, len(sa), a)
31 bn = lib.bdiff_splitlines(bc, len(sb), b)
31 bn = lib.bdiff_splitlines(bc, len(sb), b)
32 if not a[0] or not b[0]:
32 if not a[0] or not b[0]:
33 raise MemoryError
33 raise MemoryError
34 count = lib.bdiff_diff(a[0], an, b[0], bn, l)
34 count = lib.bdiff_diff(a[0], an, b[0], bn, l)
35 if count < 0:
35 if count < 0:
36 raise MemoryError
36 raise MemoryError
37 rl = [(0, 0, 0, 0)] * count
37 rl = [(0, 0, 0, 0)] * count
38 h = l.next
38 h = l.next
39 i = 0
39 i = 0
40 while h:
40 while h:
41 rl[i] = (h.a1, h.a2, h.b1, h.b2)
41 rl[i] = (h.a1, h.a2, h.b1, h.b2)
42 h = h.next
42 h = h.next
43 i += 1
43 i += 1
44 finally:
44 finally:
45 lib.free(a[0])
45 lib.free(a[0])
46 lib.free(b[0])
46 lib.free(b[0])
47 lib.bdiff_freehunks(l.next)
47 lib.bdiff_freehunks(l.next)
48 return rl
48 return rl
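# Worked example (added for clarity): blocks() returns the matching regions
# of the two inputs as (a1, a2, b1, b2) line ranges, terminated by a
# zero-length sentinel block. With this implementation one would expect:
#
#     blocks(b"a\nb\nc\n", b"a\nx\nc\n")
#     == [(0, 1, 0, 1), (2, 3, 2, 3), (3, 3, 3, 3)]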
49
49
50
50
51 def bdiff(sa: bytes, sb: bytes) -> bytes:
51 def bdiff(sa: bytes, sb: bytes) -> bytes:
52 a = ffi.new(b"struct bdiff_line**")
52 a = ffi.new("struct bdiff_line**")
53 b = ffi.new(b"struct bdiff_line**")
53 b = ffi.new("struct bdiff_line**")
54 ac = ffi.new(b"char[]", str(sa))
54 ac = ffi.new("char[]", bytes(sa))
55 bc = ffi.new(b"char[]", str(sb))
55 bc = ffi.new("char[]", bytes(sb))
56 l = ffi.new(b"struct bdiff_hunk*")
56 l = ffi.new("struct bdiff_hunk*")
57 try:
57 try:
58 an = lib.bdiff_splitlines(ac, len(sa), a)
58 an = lib.bdiff_splitlines(ac, len(sa), a)
59 bn = lib.bdiff_splitlines(bc, len(sb), b)
59 bn = lib.bdiff_splitlines(bc, len(sb), b)
60 if not a[0] or not b[0]:
60 if not a[0] or not b[0]:
61 raise MemoryError
61 raise MemoryError
62 count = lib.bdiff_diff(a[0], an, b[0], bn, l)
62 count = lib.bdiff_diff(a[0], an, b[0], bn, l)
63 if count < 0:
63 if count < 0:
64 raise MemoryError
64 raise MemoryError
65 rl = []
65 rl = []
66 h = l.next
66 h = l.next
67 la = lb = 0
67 la = lb = 0
68 while h:
68 while h:
69 if h.a1 != la or h.b1 != lb:
69 if h.a1 != la or h.b1 != lb:
70 lgt = (b[0] + h.b1).l - (b[0] + lb).l
70 lgt = (b[0] + h.b1).l - (b[0] + lb).l
71 rl.append(
71 rl.append(
72 struct.pack(
72 struct.pack(
73 b">lll",
73 b">lll",
74 (a[0] + la).l - a[0].l,
74 (a[0] + la).l - a[0].l,
75 (a[0] + h.a1).l - a[0].l,
75 (a[0] + h.a1).l - a[0].l,
76 lgt,
76 lgt,
77 )
77 )
78 )
78 )
79 rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
79 rl.append(bytes(ffi.buffer((b[0] + lb).l, lgt)))
80 la = h.a2
80 la = h.a2
81 lb = h.b2
81 lb = h.b2
82 h = h.next
82 h = h.next
83
83
84 finally:
84 finally:
85 lib.free(a[0])
85 lib.free(a[0])
86 lib.free(b[0])
86 lib.free(b[0])
87 lib.bdiff_freehunks(l.next)
87 lib.bdiff_freehunks(l.next)
88 return b"".join(rl)
88 return b"".join(rl)
@@ -1,50 +1,50
1 # mpatch.py - CFFI implementation of mpatch.c
1 # mpatch.py - CFFI implementation of mpatch.c
2 #
2 #
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 from typing import List
9 from typing import List
10
10
11 from ..pure.mpatch import *
11 from ..pure.mpatch import *
12 from ..pure.mpatch import mpatchError # silence pyflakes
12 from ..pure.mpatch import mpatchError # silence pyflakes
13 from . import _mpatch # pytype: disable=import-error
13 from . import _mpatch # pytype: disable=import-error
14
14
15 ffi = _mpatch.ffi
15 ffi = _mpatch.ffi
16 lib = _mpatch.lib
16 lib = _mpatch.lib
17
17
18
18
19 @ffi.def_extern()
19 @ffi.def_extern()
20 def cffi_get_next_item(arg, pos):
20 def cffi_get_next_item(arg, pos):
21 all, bins = ffi.from_handle(arg)
21 all, bins = ffi.from_handle(arg)
22 container = ffi.new(b"struct mpatch_flist*[1]")
22 container = ffi.new("struct mpatch_flist*[1]")
23 to_pass = ffi.new(b"char[]", str(bins[pos]))
23 to_pass = ffi.new("char[]", bytes(bins[pos]))
24 all.append(to_pass)
24 all.append(to_pass)
25 r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
25 r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
26 if r < 0:
26 if r < 0:
27 return ffi.NULL
27 return ffi.NULL
28 return container[0]
28 return container[0]
29
29
30
30
31 def patches(text: bytes, bins: List[bytes]) -> bytes:
31 def patches(text: bytes, bins: List[bytes]) -> bytes:
32 lgt = len(bins)
32 lgt = len(bins)
33 all = []
33 all = []
34 if not lgt:
34 if not lgt:
35 return text
35 return text
36 arg = (all, bins)
36 arg = (all, bins)
37 patch = lib.mpatch_fold(ffi.new_handle(arg), lib.cffi_get_next_item, 0, lgt)
37 patch = lib.mpatch_fold(ffi.new_handle(arg), lib.cffi_get_next_item, 0, lgt)
38 if not patch:
38 if not patch:
39 raise mpatchError(b"cannot decode chunk")
39 raise mpatchError(b"cannot decode chunk")
40 outlen = lib.mpatch_calcsize(len(text), patch)
40 outlen = lib.mpatch_calcsize(len(text), patch)
41 if outlen < 0:
41 if outlen < 0:
42 lib.mpatch_lfree(patch)
42 lib.mpatch_lfree(patch)
43 raise mpatchError(b"inconsistency detected")
43 raise mpatchError(b"inconsistency detected")
44 buf = ffi.new(b"char[]", outlen)
44 buf = ffi.new("char[]", outlen)
45 if lib.mpatch_apply(buf, text, len(text), patch) < 0:
45 if lib.mpatch_apply(buf, text, len(text), patch) < 0:
46 lib.mpatch_lfree(patch)
46 lib.mpatch_lfree(patch)
47 raise mpatchError(b"error applying patches")
47 raise mpatchError(b"error applying patches")
48 res = ffi.buffer(buf, outlen)[:]
48 res = ffi.buffer(buf, outlen)[:]
49 lib.mpatch_lfree(patch)
49 lib.mpatch_lfree(patch)
50 return res
50 return res
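A hedged usage sketch for ``patches()`` above: the delta is hand-built in the same (start, end, length) + data layout that ``mpatch_decode()`` consumes, and the base text is a made-up example:

    import struct

    base = b"the quick brown fox\n"
    # replace base[4:9] (b"quick") with b"slow"
    delta = struct.pack(">lll", 4, 9, 4) + b"slow"
    assert patches(base, [delta]) == b"the slow brown fox\n"
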
@@ -1,114 +1,114
1 # osutil.py - CFFI version of osutil.c
1 # osutil.py - CFFI version of osutil.c
2 #
2 #
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
3 # Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import os
9 import os
10 import stat as statmod
10 import stat as statmod
11
11
12 from ..pure.osutil import *
12 from ..pure.osutil import *
13
13
14 from .. import pycompat
14 from .. import pycompat
15
15
16 if pycompat.isdarwin:
16 if pycompat.isdarwin:
17 from . import _osutil # pytype: disable=import-error
17 from . import _osutil # pytype: disable=import-error
18
18
19 ffi = _osutil.ffi
19 ffi = _osutil.ffi
20 lib = _osutil.lib
20 lib = _osutil.lib
21
21
22 listdir_batch_size = 4096
22 listdir_batch_size = 4096
23 # tweakable number that only affects performance: the size of the
23 # tweakable number that only affects performance: the size of the
24 # byte chunks we get back from getattrlistbulk
24 # byte chunks we get back from getattrlistbulk
25
25
26 attrkinds = [None] * 20 # we need the maximum value of the VXXX enum; 20 is plenty
26 attrkinds = [None] * 20 # we need the maximum value of the VXXX enum; 20 is plenty
27
27
28 attrkinds[lib.VREG] = statmod.S_IFREG
28 attrkinds[lib.VREG] = statmod.S_IFREG
29 attrkinds[lib.VDIR] = statmod.S_IFDIR
29 attrkinds[lib.VDIR] = statmod.S_IFDIR
30 attrkinds[lib.VLNK] = statmod.S_IFLNK
30 attrkinds[lib.VLNK] = statmod.S_IFLNK
31 attrkinds[lib.VBLK] = statmod.S_IFBLK
31 attrkinds[lib.VBLK] = statmod.S_IFBLK
32 attrkinds[lib.VCHR] = statmod.S_IFCHR
32 attrkinds[lib.VCHR] = statmod.S_IFCHR
33 attrkinds[lib.VFIFO] = statmod.S_IFIFO
33 attrkinds[lib.VFIFO] = statmod.S_IFIFO
34 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
34 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
35
35
36 class stat_res:
36 class stat_res:
37 def __init__(self, st_mode, st_mtime, st_size):
37 def __init__(self, st_mode, st_mtime, st_size):
38 self.st_mode = st_mode
38 self.st_mode = st_mode
39 self.st_mtime = st_mtime
39 self.st_mtime = st_mtime
40 self.st_size = st_size
40 self.st_size = st_size
41
41
42 tv_sec_ofs = ffi.offsetof(b"struct timespec", b"tv_sec")
42 tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
43 buf = ffi.new(b"char[]", listdir_batch_size)
43 buf = ffi.new("char[]", listdir_batch_size)
44
44
45 def listdirinternal(dfd, req, stat, skip):
45 def listdirinternal(dfd, req, stat, skip):
46 ret = []
46 ret = []
47 while True:
47 while True:
48 r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
48 r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
49 if r == 0:
49 if r == 0:
50 break
50 break
51 if r == -1:
51 if r == -1:
52 raise OSError(ffi.errno, os.strerror(ffi.errno))
52 raise OSError(ffi.errno, os.strerror(ffi.errno))
53 cur = ffi.cast(b"val_attrs_t*", buf)
53 cur = ffi.cast("val_attrs_t*", buf)
54 for i in range(r):
54 for i in range(r):
55 lgt = cur.length
55 lgt = cur.length
56 assert lgt == ffi.cast(b'uint32_t*', cur)[0]
56 assert lgt == ffi.cast('uint32_t*', cur)[0]
57 ofs = cur.name_info.attr_dataoffset
57 ofs = cur.name_info.attr_dataoffset
58 str_lgt = cur.name_info.attr_length
58 str_lgt = cur.name_info.attr_length
59 base_ofs = ffi.offsetof(b'val_attrs_t', b'name_info')
59 base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
60 name = bytes(
60 name = bytes(
61 ffi.buffer(
61 ffi.buffer(
62 ffi.cast(b"char*", cur) + base_ofs + ofs, str_lgt - 1
62 ffi.cast("char*", cur) + base_ofs + ofs, str_lgt - 1
63 )
63 )
64 )
64 )
65 tp = attrkinds[cur.obj_type]
65 tp = attrkinds[cur.obj_type]
66 if name == b"." or name == b"..":
66 if name == b"." or name == b"..":
67 continue
67 continue
68 if skip == name and tp == statmod.S_ISDIR:
68 if skip == name and tp == statmod.S_ISDIR:
69 return []
69 return []
70 if stat:
70 if stat:
71 mtime = cur.mtime.tv_sec
71 mtime = cur.mtime.tv_sec
72 mode = (cur.accessmask & ~lib.S_IFMT) | tp
72 mode = (cur.accessmask & ~lib.S_IFMT) | tp
73 ret.append(
73 ret.append(
74 (
74 (
75 name,
75 name,
76 tp,
76 tp,
77 stat_res(
77 stat_res(
78 st_mode=mode,
78 st_mode=mode,
79 st_mtime=mtime,
79 st_mtime=mtime,
80 st_size=cur.datalength,
80 st_size=cur.datalength,
81 ),
81 ),
82 )
82 )
83 )
83 )
84 else:
84 else:
85 ret.append((name, tp))
85 ret.append((name, tp))
86 cur = ffi.cast(
86 cur = ffi.cast(
87 b"val_attrs_t*", int(ffi.cast(b"intptr_t", cur)) + lgt
87 "val_attrs_t*", int(ffi.cast("intptr_t", cur)) + lgt
88 )
88 )
89 return ret
89 return ret
90
90
91 def listdir(path, stat=False, skip=None):
91 def listdir(path, stat=False, skip=None):
92 req = ffi.new(b"struct attrlist*")
92 req = ffi.new("struct attrlist*")
93 req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
93 req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
94 req.commonattr = (
94 req.commonattr = (
95 lib.ATTR_CMN_RETURNED_ATTRS
95 lib.ATTR_CMN_RETURNED_ATTRS
96 | lib.ATTR_CMN_NAME
96 | lib.ATTR_CMN_NAME
97 | lib.ATTR_CMN_OBJTYPE
97 | lib.ATTR_CMN_OBJTYPE
98 | lib.ATTR_CMN_ACCESSMASK
98 | lib.ATTR_CMN_ACCESSMASK
99 | lib.ATTR_CMN_MODTIME
99 | lib.ATTR_CMN_MODTIME
100 )
100 )
101 req.fileattr = lib.ATTR_FILE_DATALENGTH
101 req.fileattr = lib.ATTR_FILE_DATALENGTH
102 dfd = lib.open(path, lib.O_RDONLY, 0)
102 dfd = lib.open(path, lib.O_RDONLY, 0)
103 if dfd == -1:
103 if dfd == -1:
104 raise OSError(ffi.errno, os.strerror(ffi.errno))
104 raise OSError(ffi.errno, os.strerror(ffi.errno))
105
105
106 try:
106 try:
107 ret = listdirinternal(dfd, req, stat, skip)
107 ret = listdirinternal(dfd, req, stat, skip)
108 finally:
108 finally:
109 try:
109 try:
110 lib.close(dfd)
110 lib.close(dfd)
111 except BaseException:
111 except BaseException:
112 pass # we ignore all errors from closing; not
112 pass # we ignore all errors from closing; not
113 # much we can do about that
113 # much we can do about that
114 return ret
114 return ret
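Assuming the CFFI ``_osutil`` module is built (this code path is Darwin-only) and given an arbitrary example directory, ``listdir()`` above can be exercised like this sketch:

    # with stat=True each entry is (name, kind, stat_res);
    # without it, only (name, kind) pairs come back
    for name, kind, st in listdir(b"/tmp", stat=True):
        if kind == statmod.S_IFDIR:
            print(name, st.st_mtime)
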
@@ -1,525 +1,525
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9
9
10 import contextlib
10 import contextlib
11 import os
11 import os
12
12
13 from .common import (
13 from .common import (
14 ErrorResponse,
14 ErrorResponse,
15 HTTP_BAD_REQUEST,
15 HTTP_BAD_REQUEST,
16 cspvalues,
16 cspvalues,
17 permhooks,
17 permhooks,
18 statusmessage,
18 statusmessage,
19 )
19 )
20
20
21 from .. import (
21 from .. import (
22 encoding,
22 encoding,
23 error,
23 error,
24 extensions,
24 extensions,
25 formatter,
25 formatter,
26 hg,
26 hg,
27 hook,
27 hook,
28 profiling,
28 profiling,
29 pycompat,
29 pycompat,
30 registrar,
30 registrar,
31 repoview,
31 repoview,
32 templatefilters,
32 templatefilters,
33 templater,
33 templater,
34 templateutil,
34 templateutil,
35 ui as uimod,
35 ui as uimod,
36 wireprotoserver,
36 wireprotoserver,
37 )
37 )
38
38
39 from . import (
39 from . import (
40 common,
40 common,
41 request as requestmod,
41 request as requestmod,
42 webcommands,
42 webcommands,
43 webutil,
43 webutil,
44 wsgicgi,
44 wsgicgi,
45 )
45 )
46
46
47
47
48 def getstyle(req, configfn, templatepath):
48 def getstyle(req, configfn, templatepath):
49 styles = (
49 styles = (
50 req.qsparams.get(b'style', None),
50 req.qsparams.get(b'style', None),
51 configfn(b'web', b'style'),
51 configfn(b'web', b'style'),
52 b'paper',
52 b'paper',
53 )
53 )
54 return styles, _stylemap(styles, templatepath)
54 return styles, _stylemap(styles, templatepath)
55
55
56
56
57 def _stylemap(styles, path=None):
57 def _stylemap(styles, path=None):
58 """Return path to mapfile for a given style.
58 """Return path to mapfile for a given style.
59
59
60 Searches mapfile in the following locations:
60 Searches mapfile in the following locations:
61 1. templatepath/style/map
61 1. templatepath/style/map
62 2. templatepath/map-style
62 2. templatepath/map-style
63 3. templatepath/map
63 3. templatepath/map
64 """
64 """
65
65
66 for style in styles:
66 for style in styles:
67 # only a plain name is allowed, to honor template paths
67 # only a plain name is allowed, to honor template paths
68 if (
68 if (
69 not style
69 not style
70 or style in (pycompat.oscurdir, pycompat.ospardir)
70 or style in (pycompat.oscurdir, pycompat.ospardir)
71 or pycompat.ossep in style
71 or pycompat.ossep in style
72 or pycompat.osaltsep
72 or pycompat.osaltsep
73 and pycompat.osaltsep in style
73 and pycompat.osaltsep in style
74 ):
74 ):
75 continue
75 continue
76 locations = (os.path.join(style, b'map'), b'map-' + style, b'map')
76 locations = (os.path.join(style, b'map'), b'map-' + style, b'map')
77
77
78 for location in locations:
78 for location in locations:
79 mapfile, fp = templater.try_open_template(location, path)
79 mapfile, fp = templater.try_open_template(location, path)
80 if mapfile:
80 if mapfile:
81 return style, mapfile, fp
81 return style, mapfile, fp
82
82
83 raise RuntimeError(b"No hgweb templates found in %r" % path)
83 raise RuntimeError(b"No hgweb templates found in %r" % path)
84
84
85
85
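To make the lookup order in ``_stylemap()`` concrete: for a hypothetical style of ``gitweb``, ``try_open_template()`` is probed with, in order:

    # candidate mapfiles probed for style b'gitweb'
    locations = (b'gitweb/map', b'map-gitweb', b'map')

falling through to the next entry in ``styles`` (ultimately ``paper``) if none of them opens.
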
86 def makebreadcrumb(url, prefix=b''):
86 def makebreadcrumb(url, prefix=b''):
87 """Return a 'URL breadcrumb' list
87 """Return a 'URL breadcrumb' list
88
88
89 A 'URL breadcrumb' is a list of URL-name pairs,
89 A 'URL breadcrumb' is a list of URL-name pairs,
90 corresponding to each of the path items on a URL.
90 corresponding to each of the path items on a URL.
91 This can be used to create path navigation entries.
91 This can be used to create path navigation entries.
92 """
92 """
93 if url.endswith(b'/'):
93 if url.endswith(b'/'):
94 url = url[:-1]
94 url = url[:-1]
95 if prefix:
95 if prefix:
96 url = b'/' + prefix + url
96 url = b'/' + prefix + url
97 relpath = url
97 relpath = url
98 if relpath.startswith(b'/'):
98 if relpath.startswith(b'/'):
99 relpath = relpath[1:]
99 relpath = relpath[1:]
100
100
101 breadcrumb = []
101 breadcrumb = []
102 urlel = url
102 urlel = url
103 pathitems = [b''] + relpath.split(b'/')
103 pathitems = [b''] + relpath.split(b'/')
104 for pathel in reversed(pathitems):
104 for pathel in reversed(pathitems):
105 if not pathel or not urlel:
105 if not pathel or not urlel:
106 break
106 break
107 breadcrumb.append({b'url': urlel, b'name': pathel})
107 breadcrumb.append({b'url': urlel, b'name': pathel})
108 urlel = os.path.dirname(urlel)
108 urlel = os.path.dirname(urlel)
109 return templateutil.mappinglist(reversed(breadcrumb))
109 return templateutil.mappinglist(reversed(breadcrumb))
110
110
111
111
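As a worked example (the URL is hypothetical), ``makebreadcrumb(b'/repo/sub/dir')`` yields a mapping list equivalent to:

    [
        {b'url': b'/repo', b'name': b'repo'},
        {b'url': b'/repo/sub', b'name': b'sub'},
        {b'url': b'/repo/sub/dir', b'name': b'dir'},
    ]
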
112 class requestcontext:
112 class requestcontext:
113 """Holds state/context for an individual request.
113 """Holds state/context for an individual request.
114
114
115 Servers can be multi-threaded. Holding state on the WSGI application
115 Servers can be multi-threaded. Holding state on the WSGI application
116 is prone to race conditions. Instances of this class exist to hold
116 is prone to race conditions. Instances of this class exist to hold
117 mutable and race-free state for requests.
117 mutable and race-free state for requests.
118 """
118 """
119
119
120 def __init__(self, app, repo, req, res):
120 def __init__(self, app, repo, req, res):
121 self.repo = repo
121 self.repo = repo
122 self.reponame = app.reponame
122 self.reponame = app.reponame
123 self.req = req
123 self.req = req
124 self.res = res
124 self.res = res
125
125
126 # Only works if the filter actually supports being upgraded to show
126 # Only works if the filter actually supports being upgraded to show
127 # visible changesets
127 # visible changesets
128 current_filter = repo.filtername
128 current_filter = repo.filtername
129 if (
129 if (
130 common.hashiddenaccess(repo, req)
130 common.hashiddenaccess(repo, req)
131 and current_filter is not None
131 and current_filter is not None
132 and current_filter + b'.hidden' in repoview.filtertable
132 and current_filter + b'.hidden' in repoview.filtertable
133 ):
133 ):
134 self.repo = self.repo.filtered(repo.filtername + b'.hidden')
134 self.repo = self.repo.filtered(repo.filtername + b'.hidden')
135
135
136 self.maxchanges = self.configint(b'web', b'maxchanges')
136 self.maxchanges = self.configint(b'web', b'maxchanges')
137 self.stripecount = self.configint(b'web', b'stripes')
137 self.stripecount = self.configint(b'web', b'stripes')
138 self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
138 self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
139 self.maxfiles = self.configint(b'web', b'maxfiles')
139 self.maxfiles = self.configint(b'web', b'maxfiles')
140 self.allowpull = self.configbool(b'web', b'allow-pull')
140 self.allowpull = self.configbool(b'web', b'allow-pull')
141
141
142 # we use untrusted=False to prevent a repo owner from using
142 # we use untrusted=False to prevent a repo owner from using
143 # web.templates in .hg/hgrc to get access to any file readable
143 # web.templates in .hg/hgrc to get access to any file readable
144 # by the user running the CGI script
144 # by the user running the CGI script
145 self.templatepath = self.config(b'web', b'templates', untrusted=False)
145 self.templatepath = self.config(b'web', b'templates', untrusted=False)
146
146
147 # This object is more expensive to build than simple config values.
147 # This object is more expensive to build than simple config values.
148 # It is shared across requests. The app will replace the object
148 # It is shared across requests. The app will replace the object
149 # if it is updated. Since this is a reference and nothing should
149 # if it is updated. Since this is a reference and nothing should
150 # modify the underlying object, it should be constant for the lifetime
150 # modify the underlying object, it should be constant for the lifetime
151 # of the request.
151 # of the request.
152 self.websubtable = app.websubtable
152 self.websubtable = app.websubtable
153
153
154 self.csp, self.nonce = cspvalues(self.repo.ui)
154 self.csp, self.nonce = cspvalues(self.repo.ui)
155
155
156 # Trust the settings from the .hg/hgrc files by default.
156 # Trust the settings from the .hg/hgrc files by default.
157 def config(self, *args, **kwargs):
157 def config(self, *args, **kwargs):
158 kwargs.setdefault('untrusted', True)
158 kwargs.setdefault('untrusted', True)
159 return self.repo.ui.config(*args, **kwargs)
159 return self.repo.ui.config(*args, **kwargs)
160
160
161 def configbool(self, *args, **kwargs):
161 def configbool(self, *args, **kwargs):
162 kwargs.setdefault('untrusted', True)
162 kwargs.setdefault('untrusted', True)
163 return self.repo.ui.configbool(*args, **kwargs)
163 return self.repo.ui.configbool(*args, **kwargs)
164
164
165 def configint(self, *args, **kwargs):
165 def configint(self, *args, **kwargs):
166 kwargs.setdefault('untrusted', True)
166 kwargs.setdefault('untrusted', True)
167 return self.repo.ui.configint(*args, **kwargs)
167 return self.repo.ui.configint(*args, **kwargs)
168
168
169 def configlist(self, *args, **kwargs):
169 def configlist(self, *args, **kwargs):
170 kwargs.setdefault('untrusted', True)
170 kwargs.setdefault('untrusted', True)
171 return self.repo.ui.configlist(*args, **kwargs)
171 return self.repo.ui.configlist(*args, **kwargs)
172
172
173 def archivelist(self, nodeid):
173 def archivelist(self, nodeid):
174 return webutil.archivelist(self.repo.ui, nodeid)
174 return webutil.archivelist(self.repo.ui, nodeid)
175
175
176 def templater(self, req):
176 def templater(self, req):
177 # determine scheme, port and server name
177 # determine scheme, port and server name
178 # this is needed to create absolute urls
178 # this is needed to create absolute urls
179 logourl = self.config(b'web', b'logourl')
179 logourl = self.config(b'web', b'logourl')
180 logoimg = self.config(b'web', b'logoimg')
180 logoimg = self.config(b'web', b'logoimg')
181 staticurl = (
181 staticurl = (
182 self.config(b'web', b'staticurl')
182 self.config(b'web', b'staticurl')
183 or req.apppath.rstrip(b'/') + b'/static/'
183 or req.apppath.rstrip(b'/') + b'/static/'
184 )
184 )
185 if not staticurl.endswith(b'/'):
185 if not staticurl.endswith(b'/'):
186 staticurl += b'/'
186 staticurl += b'/'
187
187
188 # figure out which style to use
188 # figure out which style to use
189
189
190 vars = {}
190 vars = {}
191 styles, (style, mapfile, fp) = getstyle(
191 styles, (style, mapfile, fp) = getstyle(
192 req, self.config, self.templatepath
192 req, self.config, self.templatepath
193 )
193 )
194 if style == styles[0]:
194 if style == styles[0]:
195 vars[b'style'] = style
195 vars[b'style'] = style
196
196
197 sessionvars = webutil.sessionvars(vars, b'?')
197 sessionvars = webutil.sessionvars(vars, b'?')
198
198
199 if not self.reponame:
199 if not self.reponame:
200 self.reponame = (
200 self.reponame = (
201 self.config(b'web', b'name', b'')
201 self.config(b'web', b'name', b'')
202 or req.reponame
202 or req.reponame
203 or req.apppath
203 or req.apppath
204 or self.repo.root
204 or self.repo.root
205 )
205 )
206
206
207 filters = {}
207 filters = {}
208 templatefilter = registrar.templatefilter(filters)
208 templatefilter = registrar.templatefilter(filters)
209
209
210 @templatefilter(b'websub', intype=bytes)
210 @templatefilter(b'websub', intype=bytes)
211 def websubfilter(text):
211 def websubfilter(text):
212 return templatefilters.websub(text, self.websubtable)
212 return templatefilters.websub(text, self.websubtable)
213
213
214 # create the templater
214 # create the templater
215 # TODO: export all keywords: defaults = templatekw.keywords.copy()
215 # TODO: export all keywords: defaults = templatekw.keywords.copy()
216 defaults = {
216 defaults = {
217 b'url': req.apppath + b'/',
217 b'url': req.apppath + b'/',
218 b'logourl': logourl,
218 b'logourl': logourl,
219 b'logoimg': logoimg,
219 b'logoimg': logoimg,
220 b'staticurl': staticurl,
220 b'staticurl': staticurl,
221 b'urlbase': req.advertisedbaseurl,
221 b'urlbase': req.advertisedbaseurl,
222 b'repo': self.reponame,
222 b'repo': self.reponame,
223 b'encoding': encoding.encoding,
223 b'encoding': encoding.encoding,
224 b'sessionvars': sessionvars,
224 b'sessionvars': sessionvars,
225 b'pathdef': makebreadcrumb(req.apppath),
225 b'pathdef': makebreadcrumb(req.apppath),
226 b'style': style,
226 b'style': style,
227 b'nonce': self.nonce,
227 b'nonce': self.nonce,
228 }
228 }
229 templatekeyword = registrar.templatekeyword(defaults)
229 templatekeyword = registrar.templatekeyword(defaults)
230
230
231 @templatekeyword(b'motd', requires=())
231 @templatekeyword(b'motd', requires=())
232 def motd(context, mapping):
232 def motd(context, mapping):
233 yield self.config(b'web', b'motd')
233 yield self.config(b'web', b'motd')
234
234
235 tres = formatter.templateresources(self.repo.ui, self.repo)
235 tres = formatter.templateresources(self.repo.ui, self.repo)
236 return templater.templater.frommapfile(
236 return templater.templater.frommapfile(
237 mapfile, fp=fp, filters=filters, defaults=defaults, resources=tres
237 mapfile, fp=fp, filters=filters, defaults=defaults, resources=tres
238 )
238 )
239
239
240 def sendtemplate(self, name, **kwargs):
240 def sendtemplate(self, name, **kwargs):
241 """Helper function to send a response generated from a template."""
241 """Helper function to send a response generated from a template."""
242 if self.req.method != b'HEAD':
242 if self.req.method != b'HEAD':
243 kwargs = pycompat.byteskwargs(kwargs)
243 kwargs = pycompat.byteskwargs(kwargs)
244 self.res.setbodygen(self.tmpl.generate(name, kwargs))
244 self.res.setbodygen(self.tmpl.generate(name, kwargs))
245 return self.res.sendresponse()
245 return self.res.sendresponse()
246
246
247
247
248 class hgweb:
248 class hgweb:
249 """HTTP server for individual repositories.
249 """HTTP server for individual repositories.
250
250
251 Instances of this class serve HTTP responses for a particular
251 Instances of this class serve HTTP responses for a particular
252 repository.
252 repository.
253
253
254 Instances are typically used as WSGI applications.
254 Instances are typically used as WSGI applications.
255
255
256 Some servers are multi-threaded. On these servers, there may
256 Some servers are multi-threaded. On these servers, there may
257 be multiple active threads inside __call__.
257 be multiple active threads inside __call__.
258 """
258 """
259
259
260 def __init__(self, repo, name=None, baseui=None):
260 def __init__(self, repo, name=None, baseui=None):
261 if isinstance(repo, bytes):
261 if isinstance(repo, bytes):
262 if baseui:
262 if baseui:
263 u = baseui.copy()
263 u = baseui.copy()
264 else:
264 else:
265 u = uimod.ui.load()
265 u = uimod.ui.load()
266 extensions.loadall(u)
266 extensions.loadall(u)
267 extensions.populateui(u)
267 extensions.populateui(u)
268 r = hg.repository(u, repo)
268 r = hg.repository(u, repo)
269 else:
269 else:
270 # we trust caller to give us a private copy
270 # we trust caller to give us a private copy
271 r = repo
271 r = repo
272
272
273 r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
273 r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
274 r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
274 r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
275 r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
275 r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
276 r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
276 r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
277 # resolve file patterns relative to repo root
277 # resolve file patterns relative to repo root
278 r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
278 r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
279 r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
279 r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
280 # it's unlikely that we can replace signal handlers in a WSGI server,
280 # it's unlikely that we can replace signal handlers in a WSGI server,
281 # and mod_wsgi issues a big warning. A plain hgweb process (with no
281 # and mod_wsgi issues a big warning. A plain hgweb process (with no
282 # threading) could replace signal handlers, but we don't bother
282 # threading) could replace signal handlers, but we don't bother
283 # conditionally enabling it.
283 # conditionally enabling it.
284 r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
284 r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
285 r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
285 r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
286 # displaying a bundling progress bar while serving feels wrong and may
286 # displaying a bundling progress bar while serving feels wrong and may
287 # break some WSGI implementations.
287 # break some WSGI implementations.
288 r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
288 r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
289 r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
289 r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
290 self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
290 self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
291 self._lastrepo = self._repos[0]
291 self._lastrepo = self._repos[0]
292 hook.redirect(True)
292 hook.redirect(True)
293 self.reponame = name
293 self.reponame = name
294
294
295 def _webifyrepo(self, repo):
295 def _webifyrepo(self, repo):
296 repo = getwebview(repo)
296 repo = getwebview(repo)
297 self.websubtable = webutil.getwebsubs(repo)
297 self.websubtable = webutil.getwebsubs(repo)
298 return repo
298 return repo
299
299
300 @contextlib.contextmanager
300 @contextlib.contextmanager
301 def _obtainrepo(self):
301 def _obtainrepo(self):
302 """Obtain a repo unique to the caller.
302 """Obtain a repo unique to the caller.
303
303
304 Internally we maintain a stack of cachedlocalrepo instances
304 Internally we maintain a stack of cachedlocalrepo instances
305 to be handed out. If one is available, we pop it and return it,
305 to be handed out. If one is available, we pop it and return it,
306 ensuring it is up to date in the process. If one is not available,
306 ensuring it is up to date in the process. If one is not available,
307 we clone the most recently used repo instance and return it.
307 we clone the most recently used repo instance and return it.
308
308
309 It is currently possible for the stack to grow without bounds
309 It is currently possible for the stack to grow without bounds
310 if the server allows infinite threads. However, servers should
310 if the server allows infinite threads. However, servers should
311 have a thread limit, thus establishing our limit.
311 have a thread limit, thus establishing our limit.
312 """
312 """
313 if self._repos:
313 if self._repos:
314 cached = self._repos.pop()
314 cached = self._repos.pop()
315 r, created = cached.fetch()
315 r, created = cached.fetch()
316 else:
316 else:
317 cached = self._lastrepo.copy()
317 cached = self._lastrepo.copy()
318 r, created = cached.fetch()
318 r, created = cached.fetch()
319 if created:
319 if created:
320 r = self._webifyrepo(r)
320 r = self._webifyrepo(r)
321
321
322 self._lastrepo = cached
322 self._lastrepo = cached
323 self.mtime = cached.mtime
323 self.mtime = cached.mtime
324 try:
324 try:
325 yield r
325 yield r
326 finally:
326 finally:
327 self._repos.append(cached)
327 self._repos.append(cached)
328
328
329 def run(self):
329 def run(self):
330 """Start a server from CGI environment.
330 """Start a server from CGI environment.
331
331
332 Modern servers should be using WSGI and should avoid this
332 Modern servers should be using WSGI and should avoid this
333 method, if possible.
333 method, if possible.
334 """
334 """
335 if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
335 if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
336 b"CGI/1."
336 b"CGI/1."
337 ):
337 ):
338 raise RuntimeError(
338 raise RuntimeError(
339 b"This function is only intended to be "
339 b"This function is only intended to be "
340 b"called while running as a CGI script."
340 b"called while running as a CGI script."
341 )
341 )
342 wsgicgi.launch(self)
342 wsgicgi.launch(self)
343
343
344 def __call__(self, env, respond):
344 def __call__(self, env, respond):
345 """Run the WSGI application.
345 """Run the WSGI application.
346
346
347 This may be called by multiple threads.
347 This may be called by multiple threads.
348 """
348 """
349 req = requestmod.parserequestfromenv(env)
349 req = requestmod.parserequestfromenv(env)
350 res = requestmod.wsgiresponse(req, respond)
350 res = requestmod.wsgiresponse(req, respond)
351
351
352 return self.run_wsgi(req, res)
352 return self.run_wsgi(req, res)
353
353
354 def run_wsgi(self, req, res):
354 def run_wsgi(self, req, res):
355 """Internal method to run the WSGI application.
355 """Internal method to run the WSGI application.
356
356
357 This is typically only called by Mercurial. External consumers
357 This is typically only called by Mercurial. External consumers
358 should be using instances of this class as the WSGI application.
358 should be using instances of this class as the WSGI application.
359 """
359 """
360 with self._obtainrepo() as repo:
360 with self._obtainrepo() as repo:
361 profile = repo.ui.configbool(b'profiling', b'enabled')
361 profile = repo.ui.configbool(b'profiling', b'enabled')
362 with profiling.profile(repo.ui, enabled=profile):
362 with profiling.profile(repo.ui, enabled=profile):
363 for r in self._runwsgi(req, res, repo):
363 for r in self._runwsgi(req, res, repo):
364 yield r
364 yield r
365
365
366 def _runwsgi(self, req, res, repo):
366 def _runwsgi(self, req, res, repo):
367 rctx = requestcontext(self, repo, req, res)
367 rctx = requestcontext(self, repo, req, res)
368
368
369 # This state is global across all threads.
369 # This state is global across all threads.
370 encoding.encoding = rctx.config(b'web', b'encoding')
370 encoding.encoding = rctx.config(b'web', b'encoding')
371 rctx.repo.ui.environ = req.rawenv
371 rctx.repo.ui.environ = req.rawenv
372
372
373 if rctx.csp:
373 if rctx.csp:
374 # hgwebdir may have added CSP header. Since we generate our own,
374 # hgwebdir may have added CSP header. Since we generate our own,
375 # replace it.
375 # replace it.
376 res.headers[b'Content-Security-Policy'] = rctx.csp
376 res.headers[b'Content-Security-Policy'] = rctx.csp
377
377
378 handled = wireprotoserver.handlewsgirequest(
378 handled = wireprotoserver.handlewsgirequest(
379 rctx, req, res, self.check_perm
379 rctx, req, res, self.check_perm
380 )
380 )
381 if handled:
381 if handled:
382 return res.sendresponse()
382 return res.sendresponse()
383
383
384 # Old implementations of hgweb supported dispatching the request via
384 # Old implementations of hgweb supported dispatching the request via
385 # the initial query string parameter instead of using PATH_INFO.
385 # the initial query string parameter instead of using PATH_INFO.
386 # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
386 # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
387 # a value), we use it. Otherwise fall back to the query string.
387 # a value), we use it. Otherwise fall back to the query string.
388 if req.dispatchpath is not None:
388 if req.dispatchpath is not None:
389 query = req.dispatchpath
389 query = req.dispatchpath
390 else:
390 else:
391 query = req.querystring.partition(b'&')[0].partition(b';')[0]
391 query = req.querystring.partition(b'&')[0].partition(b';')[0]
392
392
393 # translate user-visible url structure to internal structure
393 # translate user-visible url structure to internal structure
394
394
395 args = query.split(b'/', 2)
395 args = query.split(b'/', 2)
396 if b'cmd' not in req.qsparams and args and args[0]:
396 if b'cmd' not in req.qsparams and args and args[0]:
397 cmd = args.pop(0)
397 cmd = args.pop(0)
398 style = cmd.rfind(b'-')
398 style = cmd.rfind(b'-')
399 if style != -1:
399 if style != -1:
400 req.qsparams[b'style'] = cmd[:style]
400 req.qsparams[b'style'] = cmd[:style]
401 cmd = cmd[style + 1 :]
401 cmd = cmd[style + 1 :]
402
402
403 # avoid accepting e.g. style parameter as command
403 # avoid accepting e.g. style parameter as command
404 if hasattr(webcommands, pycompat.sysstr(cmd)):
404 if hasattr(webcommands, pycompat.sysstr(cmd)):
405 req.qsparams[b'cmd'] = cmd
405 req.qsparams[b'cmd'] = cmd
406
406
407 if cmd == b'static':
407 if cmd == b'static':
408 req.qsparams[b'file'] = b'/'.join(args)
408 req.qsparams[b'file'] = b'/'.join(args)
409 else:
409 else:
410 if args and args[0]:
410 if args and args[0]:
411 node = args.pop(0).replace(b'%2F', b'/')
411 node = args.pop(0).replace(b'%2F', b'/')
412 req.qsparams[b'node'] = node
412 req.qsparams[b'node'] = node
413 if args:
413 if args:
414 if b'file' in req.qsparams:
414 if b'file' in req.qsparams:
415 del req.qsparams[b'file']
415 del req.qsparams[b'file']
416 for a in args:
416 for a in args:
417 req.qsparams.add(b'file', a)
417 req.qsparams.add(b'file', a)
418
418
419 ua = req.headers.get(b'User-Agent', b'')
419 ua = req.headers.get(b'User-Agent', b'')
420 if cmd == b'rev' and b'mercurial' in ua:
420 if cmd == b'rev' and b'mercurial' in ua:
421 req.qsparams[b'style'] = b'raw'
421 req.qsparams[b'style'] = b'raw'
422
422
423 if cmd == b'archive':
423 if cmd == b'archive':
424 fn = req.qsparams[b'node']
424 fn = req.qsparams[b'node']
425 for type_, spec in webutil.archivespecs.items():
425 for type_, spec in webutil.archivespecs.items():
426 ext = spec[2]
426 ext = spec[2]
427 if fn.endswith(ext):
427 if fn.endswith(ext):
428 req.qsparams[b'node'] = fn[: -len(ext)]
428 req.qsparams[b'node'] = fn[: -len(ext)]
429 req.qsparams[b'type'] = type_
429 req.qsparams[b'type'] = type_
430 else:
430 else:
431 cmd = req.qsparams.get(b'cmd', b'')
431 cmd = req.qsparams.get(b'cmd', b'')
432
432
433 # process the web interface request
433 # process the web interface request
434
434
435 try:
435 try:
436 rctx.tmpl = rctx.templater(req)
436 rctx.tmpl = rctx.templater(req)
437 ctype = rctx.tmpl.render(
437 ctype = rctx.tmpl.render(
438 b'mimetype', {b'encoding': encoding.encoding}
438 b'mimetype', {b'encoding': encoding.encoding}
439 )
439 )
440
440
441 # check read permissions on non-static content
441 # check read permissions on non-static content
442 if cmd != b'static':
442 if cmd != b'static':
443 self.check_perm(rctx, req, None)
443 self.check_perm(rctx, req, None)
444
444
445 if cmd == b'':
445 if cmd == b'':
446 req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
446 req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
447 cmd = req.qsparams[b'cmd']
447 cmd = req.qsparams[b'cmd']
448
448
449 # Don't enable caching if using a CSP nonce because then it wouldn't
449 # Don't enable caching if using a CSP nonce because then it wouldn't
450 # be a nonce.
450 # be a nonce.
451 if rctx.configbool(b'web', b'cache') and not rctx.nonce:
451 if rctx.configbool(b'web', b'cache') and not rctx.nonce:
452 tag = b'W/"%d"' % self.mtime
452 tag = b'W/"%d"' % self.mtime
453 if req.headers.get(b'If-None-Match') == tag:
453 if req.headers.get(b'If-None-Match') == tag:
454 res.status = b'304 Not Modified'
454 res.status = b'304 Not Modified'
455 # Content-Type may be defined globally. It isn't valid on a
455 # Content-Type may be defined globally. It isn't valid on a
456 # 304, so discard it.
456 # 304, so discard it.
457 try:
457 try:
458 del res.headers[b'Content-Type']
458 del res.headers[b'Content-Type']
459 except KeyError:
459 except KeyError:
460 pass
460 pass
461 # Response body not allowed on 304.
461 # Response body not allowed on 304.
462 res.setbodybytes(b'')
462 res.setbodybytes(b'')
463 return res.sendresponse()
463 return res.sendresponse()
464
464
465 res.headers[b'ETag'] = tag
465 res.headers[b'ETag'] = tag
466
466
467 if cmd not in webcommands.__all__:
467 if pycompat.sysstr(cmd) not in webcommands.__all__:
468 msg = b'no such method: %s' % cmd
468 msg = b'no such method: %s' % cmd
469 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
469 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
470 else:
470 else:
471 # Set some globals appropriate for web handlers. Commands can
471 # Set some globals appropriate for web handlers. Commands can
472 # override easily enough.
472 # override easily enough.
473 res.status = b'200 Script output follows'
473 res.status = b'200 Script output follows'
474 res.headers[b'Content-Type'] = ctype
474 res.headers[b'Content-Type'] = ctype
475 return getattr(webcommands, pycompat.sysstr(cmd))(rctx)
475 return getattr(webcommands, pycompat.sysstr(cmd))(rctx)
476
476
477 except (error.LookupError, error.RepoLookupError) as err:
477 except (error.LookupError, error.RepoLookupError) as err:
478 msg = pycompat.bytestr(err)
478 msg = pycompat.bytestr(err)
479 if hasattr(err, 'name') and not isinstance(
479 if hasattr(err, 'name') and not isinstance(
480 err, error.ManifestLookupError
480 err, error.ManifestLookupError
481 ):
481 ):
482 msg = b'revision not found: %s' % err.name
482 msg = b'revision not found: %s' % err.name
483
483
484 res.status = b'404 Not Found'
484 res.status = b'404 Not Found'
485 res.headers[b'Content-Type'] = ctype
485 res.headers[b'Content-Type'] = ctype
486 return rctx.sendtemplate(b'error', error=msg)
486 return rctx.sendtemplate(b'error', error=msg)
487 except (error.RepoError, error.StorageError) as e:
487 except (error.RepoError, error.StorageError) as e:
488 res.status = b'500 Internal Server Error'
488 res.status = b'500 Internal Server Error'
489 res.headers[b'Content-Type'] = ctype
489 res.headers[b'Content-Type'] = ctype
490 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
490 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
491 except error.Abort as e:
491 except error.Abort as e:
492 res.status = b'403 Forbidden'
492 res.status = b'403 Forbidden'
493 res.headers[b'Content-Type'] = ctype
493 res.headers[b'Content-Type'] = ctype
494 return rctx.sendtemplate(b'error', error=e.message)
494 return rctx.sendtemplate(b'error', error=e.message)
495 except ErrorResponse as e:
495 except ErrorResponse as e:
496 for k, v in e.headers:
496 for k, v in e.headers:
497 res.headers[k] = v
497 res.headers[k] = v
498 res.status = statusmessage(e.code, pycompat.bytestr(e))
498 res.status = statusmessage(e.code, pycompat.bytestr(e))
499 res.headers[b'Content-Type'] = ctype
499 res.headers[b'Content-Type'] = ctype
500 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
500 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
501
501
502 def check_perm(self, rctx, req, op):
502 def check_perm(self, rctx, req, op):
503 for permhook in permhooks:
503 for permhook in permhooks:
504 permhook(rctx, req, op)
504 permhook(rctx, req, op)
505
505
506
506
507 def getwebview(repo):
507 def getwebview(repo):
508 """The 'web.view' config controls changeset filter to hgweb. Possible
508 """The 'web.view' config controls changeset filter to hgweb. Possible
509 values are ``served``, ``visible`` and ``all``. Default is ``served``.
509 values are ``served``, ``visible`` and ``all``. Default is ``served``.
510 The ``served`` filter only shows changesets that can be pulled from the
510 The ``served`` filter only shows changesets that can be pulled from the
511 hgweb instance. The ``visible`` filter includes secret changesets but
511 hgweb instance. The ``visible`` filter includes secret changesets but
512 still excludes "hidden" ones.
512 still excludes "hidden" ones.
513
513
514 See the repoview module for details.
514 See the repoview module for details.
515
515
516 The option has been around undocumented since Mercurial 2.5, but no
516 The option has been around undocumented since Mercurial 2.5, but no
517 user ever asked about it. So we had better keep it undocumented for now."""
517 user ever asked about it. So we had better keep it undocumented for now."""
518 # experimental config: web.view
518 # experimental config: web.view
519 viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
519 viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
520 if viewconfig == b'all':
520 if viewconfig == b'all':
521 return repo.unfiltered()
521 return repo.unfiltered()
522 elif viewconfig in repoview.filtertable:
522 elif viewconfig in repoview.filtertable:
523 return repo.filtered(viewconfig)
523 return repo.filtered(viewconfig)
524 else:
524 else:
525 return repo.filtered(b'served')
525 return repo.filtered(b'served')
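A minimal sketch of mounting the ``hgweb`` class above as a WSGI application; the repository path and name are placeholders, and any WSGI server (wsgiref is used here for brevity) could host it:

    from wsgiref.simple_server import make_server

    # hgweb takes byte-string paths and names
    application = hgweb(b"/srv/repo", name=b"myrepo")

    if __name__ == "__main__":
        make_server("localhost", 8080, application).serve_forever()
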
@@ -1,1596 +1,1596
1 #
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import copy
9 import copy
10 import mimetypes
10 import mimetypes
11 import os
11 import os
12 import re
12 import re
13
13
14 from ..i18n import _
14 from ..i18n import _
15 from ..node import hex, short
15 from ..node import hex, short
16
16
17 from .common import (
17 from .common import (
18 ErrorResponse,
18 ErrorResponse,
19 HTTP_FORBIDDEN,
19 HTTP_FORBIDDEN,
20 HTTP_NOT_FOUND,
20 HTTP_NOT_FOUND,
21 get_contact,
21 get_contact,
22 paritygen,
22 paritygen,
23 staticfile,
23 staticfile,
24 )
24 )
25
25
26 from .. import (
26 from .. import (
27 archival,
27 archival,
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 graphmod,
31 graphmod,
32 pycompat,
32 pycompat,
33 revset,
33 revset,
34 revsetlang,
34 revsetlang,
35 scmutil,
35 scmutil,
36 smartset,
36 smartset,
37 templateutil,
37 templateutil,
38 )
38 )
39
39
40 from ..utils import stringutil
40 from ..utils import stringutil
41
41
42 from . import webutil
42 from . import webutil
43
43
44 __all__ = []
44 __all__ = []
45 commands = {}
45 commands = {}
46
46
47
47
48 class webcommand:
48 class webcommand:
49 """Decorator used to register a web command handler.
49 """Decorator used to register a web command handler.
50
50
51 The decorator takes as its positional arguments the name/path the
51 The decorator takes as its positional arguments the name/path the
52 command should be accessible under.
52 command should be accessible under.
53
53
54 When called, functions receive as arguments a ``requestcontext``,
54 When called, functions receive as arguments a ``requestcontext``,
55 ``wsgirequest``, and a templater instance for generating output.
55 ``wsgirequest``, and a templater instance for generating output.
56 The functions should populate the ``rctx.res`` object with details
56 The functions should populate the ``rctx.res`` object with details
57 about the HTTP response.
57 about the HTTP response.
58
58
59 The function returns a generator to be consumed by the WSGI application.
59 The function returns a generator to be consumed by the WSGI application.
60 For most commands, this should be the result from
60 For most commands, this should be the result from
61 ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()``
61 ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()``
62 to render a template.
62 to render a template.
63
63
64 Usage:
64 Usage:
65
65
66 @webcommand('mycommand')
66 @webcommand('mycommand')
67 def mycommand(web):
67 def mycommand(web):
68 pass
68 pass
69 """
69 """
70
70
71 def __init__(self, name):
71 def __init__(self, name):
72 self.name = name
72 self.name = name
73
73
74 def __call__(self, func):
74 def __call__(self, func):
75 __all__.append(self.name)
75 __all__.append(pycompat.sysstr(self.name))
76 commands[self.name] = func
76 commands[self.name] = func
77 return func
77 return func
78
78
79
79
80 @webcommand(b'log')
80 @webcommand(b'log')
81 def log(web):
81 def log(web):
82 """
82 """
83 /log[/{revision}[/{path}]]
83 /log[/{revision}[/{path}]]
84 --------------------------
84 --------------------------
85
85
86 Show repository or file history.
86 Show repository or file history.
87
87
88 For URLs of the form ``/log/{revision}``, a list of changesets starting at
88 For URLs of the form ``/log/{revision}``, a list of changesets starting at
89 the specified changeset identifier is shown. If ``{revision}`` is not
89 the specified changeset identifier is shown. If ``{revision}`` is not
90 defined, the default is ``tip``. This form is equivalent to the
90 defined, the default is ``tip``. This form is equivalent to the
91 ``changelog`` handler.
91 ``changelog`` handler.
92
92
93 For URLs of the form ``/log/{revision}/{file}``, the history for a specific
93 For URLs of the form ``/log/{revision}/{file}``, the history for a specific
94 file will be shown. This form is equivalent to the ``filelog`` handler.
94 file will be shown. This form is equivalent to the ``filelog`` handler.
95 """
95 """
96
96
97 if web.req.qsparams.get(b'file'):
97 if web.req.qsparams.get(b'file'):
98 return filelog(web)
98 return filelog(web)
99 else:
99 else:
100 return changelog(web)
100 return changelog(web)
101
101
102
102
103 @webcommand(b'rawfile')
103 @webcommand(b'rawfile')
104 def rawfile(web):
104 def rawfile(web):
105 guessmime = web.configbool(b'web', b'guessmime')
105 guessmime = web.configbool(b'web', b'guessmime')
106
106
107 path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
107 path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
108 if not path:
108 if not path:
109 return manifest(web)
109 return manifest(web)
110
110
111 try:
111 try:
112 fctx = webutil.filectx(web.repo, web.req)
112 fctx = webutil.filectx(web.repo, web.req)
113 except error.LookupError as inst:
113 except error.LookupError as inst:
114 try:
114 try:
115 return manifest(web)
115 return manifest(web)
116 except ErrorResponse:
116 except ErrorResponse:
117 raise inst
117 raise inst
118
118
119 path = fctx.path()
119 path = fctx.path()
120 text = fctx.data()
120 text = fctx.data()
121 mt = b'application/binary'
121 mt = b'application/binary'
122 if guessmime:
122 if guessmime:
123 mt = mimetypes.guess_type(pycompat.fsdecode(path))[0]
123 mt = mimetypes.guess_type(pycompat.fsdecode(path))[0]
124 if mt is None:
124 if mt is None:
125 if stringutil.binary(text):
125 if stringutil.binary(text):
126 mt = b'application/binary'
126 mt = b'application/binary'
127 else:
127 else:
128 mt = b'text/plain'
128 mt = b'text/plain'
129 else:
129 else:
130 mt = pycompat.sysbytes(mt)
130 mt = pycompat.sysbytes(mt)
131
131
132 if mt.startswith(b'text/'):
132 if mt.startswith(b'text/'):
133 mt += b'; charset="%s"' % encoding.encoding
133 mt += b'; charset="%s"' % encoding.encoding
134
134
135 web.res.headers[b'Content-Type'] = mt
135 web.res.headers[b'Content-Type'] = mt
136 filename = (
136 filename = (
137 path.rpartition(b'/')[-1].replace(b'\\', b'\\\\').replace(b'"', b'\\"')
137 path.rpartition(b'/')[-1].replace(b'\\', b'\\\\').replace(b'"', b'\\"')
138 )
138 )
139 web.res.headers[b'Content-Disposition'] = (
139 web.res.headers[b'Content-Disposition'] = (
140 b'inline; filename="%s"' % filename
140 b'inline; filename="%s"' % filename
141 )
141 )
142 web.res.setbodybytes(text)
142 web.res.setbodybytes(text)
143 return web.res.sendresponse()
143 return web.res.sendresponse()
144
144
145
145
146 def _filerevision(web, fctx):
146 def _filerevision(web, fctx):
147 f = fctx.path()
147 f = fctx.path()
148 text = fctx.data()
148 text = fctx.data()
149 parity = paritygen(web.stripecount)
149 parity = paritygen(web.stripecount)
150 ishead = fctx.filenode() in fctx.filelog().heads()
150 ishead = fctx.filenode() in fctx.filelog().heads()
151
151
152 if stringutil.binary(text):
152 if stringutil.binary(text):
153 mt = pycompat.sysbytes(
153 mt = pycompat.sysbytes(
154 mimetypes.guess_type(pycompat.fsdecode(f))[0]
154 mimetypes.guess_type(pycompat.fsdecode(f))[0]
155 or r'application/octet-stream'
155 or r'application/octet-stream'
156 )
156 )
157 text = b'(binary:%s)' % mt
157 text = b'(binary:%s)' % mt
158
158
159 def lines(context):
159 def lines(context):
160 for lineno, t in enumerate(text.splitlines(True)):
160 for lineno, t in enumerate(text.splitlines(True)):
161 yield {
161 yield {
162 b"line": t,
162 b"line": t,
163 b"lineid": b"l%d" % (lineno + 1),
163 b"lineid": b"l%d" % (lineno + 1),
164 b"linenumber": b"% 6d" % (lineno + 1),
164 b"linenumber": b"% 6d" % (lineno + 1),
165 b"parity": next(parity),
165 b"parity": next(parity),
166 }
166 }
167
167
168 return web.sendtemplate(
168 return web.sendtemplate(
169 b'filerevision',
169 b'filerevision',
170 file=f,
170 file=f,
171 path=webutil.up(f),
171 path=webutil.up(f),
172 text=templateutil.mappinggenerator(lines),
172 text=templateutil.mappinggenerator(lines),
173 symrev=webutil.symrevorshortnode(web.req, fctx),
173 symrev=webutil.symrevorshortnode(web.req, fctx),
174 rename=webutil.renamelink(fctx),
174 rename=webutil.renamelink(fctx),
175 permissions=fctx.manifest().flags(f),
175 permissions=fctx.manifest().flags(f),
176 ishead=int(ishead),
176 ishead=int(ishead),
177 **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
177 **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
178 )
178 )
179
179
180
180
181 @webcommand(b'file')
181 @webcommand(b'file')
182 def file(web):
182 def file(web):
183 """
183 """
184 /file/{revision}[/{path}]
184 /file/{revision}[/{path}]
185 -------------------------
185 -------------------------
186
186
187 Show information about a directory or file in the repository.
187 Show information about a directory or file in the repository.
188
188
189 Info about the ``path`` given as a URL parameter will be rendered.
189 Info about the ``path`` given as a URL parameter will be rendered.
190
190
191 If ``path`` is a directory, information about the entries in that
191 If ``path`` is a directory, information about the entries in that
192 directory will be rendered. This form is equivalent to the ``manifest``
192 directory will be rendered. This form is equivalent to the ``manifest``
193 handler.
193 handler.
194
194
195 If ``path`` is a file, information about that file will be shown via
195 If ``path`` is a file, information about that file will be shown via
196 the ``filerevision`` template.
196 the ``filerevision`` template.
197
197
198 If ``path`` is not defined, information about the root directory will
198 If ``path`` is not defined, information about the root directory will
199 be rendered.
199 be rendered.
200 """
200 """
201 if web.req.qsparams.get(b'style') == b'raw':
201 if web.req.qsparams.get(b'style') == b'raw':
202 return rawfile(web)
202 return rawfile(web)
203
203
204 path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
204 path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
205 if not path:
205 if not path:
206 return manifest(web)
206 return manifest(web)
207 try:
207 try:
208 return _filerevision(web, webutil.filectx(web.repo, web.req))
208 return _filerevision(web, webutil.filectx(web.repo, web.req))
209 except error.LookupError as inst:
209 except error.LookupError as inst:
210 try:
210 try:
211 return manifest(web)
211 return manifest(web)
212 except ErrorResponse:
212 except ErrorResponse:
213 raise inst
213 raise inst
214
214
215
215
216 def _search(web):
216 def _search(web):
217 MODE_REVISION = b'rev'
217 MODE_REVISION = b'rev'
218 MODE_KEYWORD = b'keyword'
218 MODE_KEYWORD = b'keyword'
219 MODE_REVSET = b'revset'
219 MODE_REVSET = b'revset'
220
220
221 def revsearch(ctx):
221 def revsearch(ctx):
222 yield ctx
222 yield ctx
223
223
224 def keywordsearch(query):
224 def keywordsearch(query):
225 lower = encoding.lower
225 lower = encoding.lower
226 qw = lower(query).split()
226 qw = lower(query).split()
227
227
228 def revgen():
228 def revgen():
229 cl = web.repo.changelog
229 cl = web.repo.changelog
230 for i in range(len(web.repo) - 1, 0, -100):
230 for i in range(len(web.repo) - 1, 0, -100):
231 l = []
231 l = []
232 for j in cl.revs(max(0, i - 99), i):
232 for j in cl.revs(max(0, i - 99), i):
233 ctx = web.repo[j]
233 ctx = web.repo[j]
234 l.append(ctx)
234 l.append(ctx)
235 l.reverse()
235 l.reverse()
236 for e in l:
236 for e in l:
237 yield e
237 yield e
238
238
239 for ctx in revgen():
239 for ctx in revgen():
240 miss = 0
240 miss = 0
241 for q in qw:
241 for q in qw:
242 if not (
242 if not (
243 q in lower(ctx.user())
243 q in lower(ctx.user())
244 or q in lower(ctx.description())
244 or q in lower(ctx.description())
245 or q in lower(b" ".join(ctx.files()))
245 or q in lower(b" ".join(ctx.files()))
246 ):
246 ):
247 miss = 1
247 miss = 1
248 break
248 break
249 if miss:
249 if miss:
250 continue
250 continue
251
251
252 yield ctx
252 yield ctx
253
253
254 def revsetsearch(revs):
254 def revsetsearch(revs):
255 for r in revs:
255 for r in revs:
256 yield web.repo[r]
256 yield web.repo[r]
257
257
258 searchfuncs = {
258 searchfuncs = {
259 MODE_REVISION: (revsearch, b'exact revision search'),
259 MODE_REVISION: (revsearch, b'exact revision search'),
260 MODE_KEYWORD: (keywordsearch, b'literal keyword search'),
260 MODE_KEYWORD: (keywordsearch, b'literal keyword search'),
261 MODE_REVSET: (revsetsearch, b'revset expression search'),
261 MODE_REVSET: (revsetsearch, b'revset expression search'),
262 }
262 }
263
263
264 def getsearchmode(query):
264 def getsearchmode(query):
265 try:
265 try:
266 ctx = scmutil.revsymbol(web.repo, query)
266 ctx = scmutil.revsymbol(web.repo, query)
267 except (error.RepoError, error.LookupError):
267 except (error.RepoError, error.LookupError):
268 # query is not an exact revision pointer, need to
268 # query is not an exact revision pointer, need to
269 # decide if it's a revset expression or keywords
269 # decide if it's a revset expression or keywords
270 pass
270 pass
271 else:
271 else:
272 return MODE_REVISION, ctx
272 return MODE_REVISION, ctx
273
273
274 revdef = b'reverse(%s)' % query
274 revdef = b'reverse(%s)' % query
275 try:
275 try:
276 tree = revsetlang.parse(revdef)
276 tree = revsetlang.parse(revdef)
277 except error.ParseError:
277 except error.ParseError:
278 # can't parse to a revset tree
278 # can't parse to a revset tree
279 return MODE_KEYWORD, query
279 return MODE_KEYWORD, query
280
280
281 if revsetlang.depth(tree) <= 2:
281 if revsetlang.depth(tree) <= 2:
282 # no revset syntax used
282 # no revset syntax used
283 return MODE_KEYWORD, query
283 return MODE_KEYWORD, query
284
284
285 if any(
285 if any(
286 (token, (value or b'')[:3]) == (b'string', b're:')
286 (token, (value or b'')[:3]) == (b'string', b're:')
287 for token, value, pos in revsetlang.tokenize(revdef)
287 for token, value, pos in revsetlang.tokenize(revdef)
288 ):
288 ):
289 return MODE_KEYWORD, query
289 return MODE_KEYWORD, query
290
290
291 funcsused = revsetlang.funcsused(tree)
291 funcsused = revsetlang.funcsused(tree)
292 if not funcsused.issubset(revset.safesymbols):
292 if not funcsused.issubset(revset.safesymbols):
293 return MODE_KEYWORD, query
293 return MODE_KEYWORD, query
294
294
295 try:
295 try:
296 mfunc = revset.match(
296 mfunc = revset.match(
297 web.repo.ui, revdef, lookup=revset.lookupfn(web.repo)
297 web.repo.ui, revdef, lookup=revset.lookupfn(web.repo)
298 )
298 )
299 revs = mfunc(web.repo)
299 revs = mfunc(web.repo)
300 return MODE_REVSET, revs
300 return MODE_REVSET, revs
301 # ParseError: wrongly placed tokens, wrong arguments, etc.
301 # ParseError: wrongly placed tokens, wrong arguments, etc.
302 # RepoLookupError: no such revision, e.g. in 'revision:'
302 # RepoLookupError: no such revision, e.g. in 'revision:'
303 # Abort: bookmark/tag does not exist
303 # Abort: bookmark/tag does not exist
304 # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
304 # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
305 except (
305 except (
306 error.ParseError,
306 error.ParseError,
307 error.RepoLookupError,
307 error.RepoLookupError,
308 error.Abort,
308 error.Abort,
309 LookupError,
309 LookupError,
310 ):
310 ):
311 return MODE_KEYWORD, query
311 return MODE_KEYWORD, query
312
312
    def changelist(context):
        count = 0

        for ctx in searchfunc[0](funcarg):
            count += 1
            n = scmutil.binnode(ctx)
            showtags = webutil.showtag(web.repo, b'changelogtag', n)
            files = webutil.listfilediffs(ctx.files(), n, web.maxfiles)

            lm = webutil.commonentry(web.repo, ctx)
            lm.update(
                {
                    b'parity': next(parity),
                    b'changelogtag': showtags,
                    b'files': files,
                }
            )
            yield lm

            if count >= revcount:
                break

    query = web.req.qsparams[b'rev']
    revcount = web.maxchanges
    if b'revcount' in web.req.qsparams:
        try:
            revcount = int(web.req.qsparams.get(b'revcount', revcount))
            revcount = max(revcount, 1)
            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    lessvars[b'revcount'] = max(revcount // 2, 1)
    lessvars[b'rev'] = query
    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    morevars[b'revcount'] = revcount * 2
    morevars[b'rev'] = query
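    # lessvars/morevars feed the template's "less"/"more" pagination links:
    # the same query is re-submitted with revcount halved or doubled.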

    mode, funcarg = getsearchmode(query)

    if b'forcekw' in web.req.qsparams:
        showforcekw = b''
        showunforcekw = searchfuncs[mode][1]
        mode = MODE_KEYWORD
        funcarg = query
    else:
        if mode != MODE_KEYWORD:
            showforcekw = searchfuncs[MODE_KEYWORD][1]
        else:
            showforcekw = b''
        showunforcekw = b''
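    # A 'forcekw' query parameter overrides the detected mode and forces a
    # keyword search; showforcekw/showunforcekw carry the labels for the
    # template link that toggles between the two interpretations.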

    searchfunc = searchfuncs[mode]

    tip = web.repo[b'tip']
    parity = paritygen(web.stripecount)

    return web.sendtemplate(
        b'search',
        query=query,
        node=tip.hex(),
        symrev=b'tip',
        entries=templateutil.mappinggenerator(changelist, name=b'searchentry'),
        archives=web.archivelist(b'tip'),
        morevars=morevars,
        lessvars=lessvars,
        modedesc=searchfunc[1],
        showforcekw=showforcekw,
        showunforcekw=showunforcekw,
    )


@webcommand(b'changelog')
def changelog(web, shortlog=False):
    """
    /changelog[/{revision}]
    -----------------------

    Show information about multiple changesets.

    If the optional ``revision`` URL argument is absent, information about
    all changesets starting at ``tip`` will be rendered. If the ``revision``
    argument is present, changesets will be shown starting from the specified
    revision.

    If ``revision`` is absent, the ``rev`` query string argument may be
    defined. This will perform a search for changesets.

    The argument for ``rev`` can be a single revision, a revision set,
    or a literal keyword to search for in changeset data (equivalent to
    :hg:`log -k`).

    The ``revcount`` query string argument defines the maximum number of
    changesets to render.

    For non-searches, the ``changelog`` template will be rendered.
    """

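    # Illustrative requests: /changelog shows the newest changesets,
    # /changelog/stable starts from the 'stable' head, and
    # /changelog?rev=keyword searches changeset data for 'keyword'.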
    query = b''
    if b'node' in web.req.qsparams:
        ctx = webutil.changectx(web.repo, web.req)
        symrev = webutil.symrevorshortnode(web.req, ctx)
    elif b'rev' in web.req.qsparams:
        return _search(web)
    else:
        ctx = web.repo[b'tip']
        symrev = b'tip'

    def changelist(maxcount):
        revs = []
        if pos != -1:
            revs = web.repo.changelog.revs(pos, 0)

        for entry in webutil.changelistentries(web, revs, maxcount, parity):
            yield entry

    if shortlog:
        revcount = web.maxshortchanges
    else:
        revcount = web.maxchanges

    if b'revcount' in web.req.qsparams:
        try:
            revcount = int(web.req.qsparams.get(b'revcount', revcount))
            revcount = max(revcount, 1)
            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    lessvars[b'revcount'] = max(revcount // 2, 1)
    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    morevars[b'revcount'] = revcount * 2

    count = len(web.repo)
    pos = ctx.rev()
    parity = paritygen(web.stripecount)

    changenav = webutil.revnav(web.repo).gen(pos, revcount, count)

    entries = list(changelist(revcount + 1))
    latestentry = entries[:1]
    if len(entries) > revcount:
        nextentry = entries[-1:]
        entries = entries[:-1]
    else:
        nextentry = []

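    # changelist() was asked for revcount + 1 entries solely to detect
    # whether another page exists; the surplus entry becomes nextentry for
    # the template instead of being rendered in the main list.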
    return web.sendtemplate(
        b'shortlog' if shortlog else b'changelog',
        changenav=changenav,
        node=ctx.hex(),
        rev=pos,
        symrev=symrev,
        changesets=count,
        entries=templateutil.mappinglist(entries),
        latestentry=templateutil.mappinglist(latestentry),
        nextentry=templateutil.mappinglist(nextentry),
        archives=web.archivelist(b'tip'),
        revcount=revcount,
        morevars=morevars,
        lessvars=lessvars,
        query=query,
    )


@webcommand(b'shortlog')
def shortlog(web):
    """
    /shortlog
    ---------

    Show basic information about a set of changesets.

    This accepts the same parameters as the ``changelog`` handler. The only
    difference is the ``shortlog`` template will be rendered instead of the
    ``changelog`` template.
    """
    return changelog(web, shortlog=True)


@webcommand(b'changeset')
def changeset(web):
    """
    /changeset[/{revision}]
    -----------------------

    Show information about a single changeset.

    A URL path argument is the changeset identifier to show. See ``hg help
    revisions`` for possible values. If not defined, the ``tip`` changeset
    will be shown.

    The ``changeset`` template is rendered. Contents of the ``changesettag``,
    ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many
    templates related to diffs may all be used to produce the output.
    """
    ctx = webutil.changectx(web.repo, web.req)

    return web.sendtemplate(b'changeset', **webutil.changesetentry(web, ctx))


rev = webcommand(b'rev')(changeset)


def decodepath(path: bytes) -> bytes:
    """Hook for mapping a path in the repository to a path in the
    working copy.

    Extensions (e.g., largefiles) can override this to remap files in
    the virtual file system presented by the manifest command below."""
    return path
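# A hypothetical override, e.g. from an extension wrapping this module:
#   webcommands.decodepath = lambda path: mymapping.get(path, path)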


@webcommand(b'manifest')
def manifest(web):
    """
    /manifest[/{revision}[/{path}]]
    -------------------------------

    Show information about a directory.

    If the URL path arguments are omitted, information about the root
    directory for the ``tip`` changeset will be shown.

    Because this handler can only show information for directories, it
    is recommended to use the ``file`` handler instead, as it can handle both
    directories and files.

    The ``manifest`` template will be rendered for this handler.
    """
    if b'node' in web.req.qsparams:
        ctx = webutil.changectx(web.repo, web.req)
        symrev = webutil.symrevorshortnode(web.req, ctx)
    else:
        ctx = web.repo[b'tip']
        symrev = b'tip'
    path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
    mf = ctx.manifest()
    node = scmutil.binnode(ctx)

    files = {}
    dirs = {}
    parity = paritygen(web.stripecount)

    if path and path[-1:] != b"/":
        path += b"/"
    l = len(path)
    abspath = b"/" + path

    for full, n in mf.items():
        # the virtual path (working copy path) used for the full
        # (repository) path
        f = decodepath(full)

        if f[:l] != path:
            continue
        remain = f[l:]
        elements = remain.split(b'/')
        if len(elements) == 1:
            files[remain] = full
        else:
            h = dirs # need to retain ref to dirs (root)
            for elem in elements[0:-1]:
                if elem not in h:
                    h[elem] = {}
                h = h[elem]
                if len(h) > 1:
                    break
            h[None] = None # denotes files present

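    # Worked example (illustrative): at the repository root (path == b''),
    # manifest entries b'README' and b'doc/ch1.txt' yield
    # files == {b'README': b'README'} and dirs == {b'doc': {None: None}}:
    # direct children land in files, deeper paths build nested dicts.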
    if mf and not files and not dirs:
        raise ErrorResponse(HTTP_NOT_FOUND, b'path not found: ' + path)

    def filelist(context):
        for f in sorted(files):
            full = files[f]

            fctx = ctx.filectx(full)
            yield {
                b"file": full,
                b"parity": next(parity),
                b"basename": f,
                b"date": fctx.date(),
                b"size": fctx.size(),
                b"permissions": mf.flags(full),
            }

    def dirlist(context):
        for d in sorted(dirs):
            emptydirs = []
            h = dirs[d]
            while isinstance(h, dict) and len(h) == 1:
                k, v = next(iter(h.items()))
                if v:
                    emptydirs.append(k)
                h = v

            path = b"%s%s" % (abspath, d)
            yield {
                b"parity": next(parity),
                b"path": path,
                # pytype: disable=wrong-arg-types
                b"emptydirs": b"/".join(emptydirs),
                # pytype: enable=wrong-arg-types
                b"basename": d,
            }

    return web.sendtemplate(
        b'manifest',
        symrev=symrev,
        path=abspath,
        up=webutil.up(abspath),
        upparity=next(parity),
        fentries=templateutil.mappinggenerator(filelist),
        dentries=templateutil.mappinggenerator(dirlist),
        archives=web.archivelist(hex(node)),
        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
    )


@webcommand(b'tags')
def tags(web):
    """
    /tags
    -----

    Show information about tags.

    No arguments are accepted.

    The ``tags`` template is rendered.
    """
    i = list(reversed(web.repo.tagslist()))
    parity = paritygen(web.stripecount)

    def entries(context, notip, latestonly):
        t = i
        if notip:
            t = [(k, n) for k, n in i if k != b"tip"]
        if latestonly:
            t = t[:1]
        for k, n in t:
            yield {
                b"parity": next(parity),
                b"tag": k,
                b"date": web.repo[n].date(),
                b"node": hex(n),
            }

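    # The args tuples below are handed to entries() as its
    # (notip, latestonly) parameters, so one generator backs all three
    # template keywords.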
    return web.sendtemplate(
        b'tags',
        node=hex(web.repo.changelog.tip()),
        entries=templateutil.mappinggenerator(entries, args=(False, False)),
        entriesnotip=templateutil.mappinggenerator(entries, args=(True, False)),
        latestentry=templateutil.mappinggenerator(entries, args=(True, True)),
    )


@webcommand(b'bookmarks')
def bookmarks(web):
    """
    /bookmarks
    ----------

    Show information about bookmarks.

    No arguments are accepted.

    The ``bookmarks`` template is rendered.
    """
    i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
    sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
    i = sorted(i, key=sortkey, reverse=True)
    parity = paritygen(web.stripecount)

    def entries(context, latestonly):
        t = i
        if latestonly:
            t = i[:1]
        for k, n in t:
            yield {
                b"parity": next(parity),
                b"bookmark": k,
                b"date": web.repo[n].date(),
                b"node": hex(n),
            }

    if i:
        latestrev = i[0][1]
    else:
        latestrev = -1
    lastdate = web.repo[latestrev].date()

    return web.sendtemplate(
        b'bookmarks',
        node=hex(web.repo.changelog.tip()),
        lastchange=templateutil.mappinglist([{b'date': lastdate}]),
        entries=templateutil.mappinggenerator(entries, args=(False,)),
        latestentry=templateutil.mappinggenerator(entries, args=(True,)),
    )


@webcommand(b'branches')
def branches(web):
    """
    /branches
    ---------

    Show information about branches.

    All known branches are contained in the output, even closed branches.

    No arguments are accepted.

    The ``branches`` template is rendered.
    """
    entries = webutil.branchentries(web.repo, web.stripecount)
    latestentry = webutil.branchentries(web.repo, web.stripecount, 1)

    return web.sendtemplate(
        b'branches',
        node=hex(web.repo.changelog.tip()),
        entries=entries,
        latestentry=latestentry,
    )


@webcommand(b'summary')
def summary(web):
    """
    /summary
    --------

    Show a summary of repository state.

    Information about the latest changesets, bookmarks, tags, and branches
    is captured by this handler.

    The ``summary`` template is rendered.
    """
    i = reversed(web.repo.tagslist())

    def tagentries(context):
        parity = paritygen(web.stripecount)
        count = 0
        for k, n in i:
            if k == b"tip": # skip tip
                continue

            count += 1
            if count > 10: # limit to 10 tags
                break

            yield {
                b'parity': next(parity),
                b'tag': k,
                b'node': hex(n),
                b'date': web.repo[n].date(),
            }

    def bookmarks(context):
        parity = paritygen(web.stripecount)
        marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
        sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
        marks = sorted(marks, key=sortkey, reverse=True)
        for k, n in marks[:10]: # limit to 10 bookmarks
            yield {
                b'parity': next(parity),
                b'bookmark': k,
                b'date': web.repo[n].date(),
                b'node': hex(n),
            }

    def changelist(context):
        parity = paritygen(web.stripecount, offset=start - end)
        l = [] # build a list in forward order for efficiency
        revs = []
        if start < end:
            revs = web.repo.changelog.revs(start, end - 1)
        for i in revs:
            ctx = web.repo[i]
            lm = webutil.commonentry(web.repo, ctx)
            lm[b'parity'] = next(parity)
            l.append(lm)

        for entry in reversed(l):
            yield entry

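    # start/end computed below select the trailing web.maxchanges-sized
    # window of the changelog; changelist() walks it forward for efficiency
    # and then yields in reverse so the newest changeset comes first.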
    tip = web.repo[b'tip']
    count = len(web.repo)
    start = max(0, count - web.maxchanges)
    end = min(count, start + web.maxchanges)

    desc = web.config(b"web", b"description")
    if not desc:
        desc = b'unknown'
    labels = web.configlist(b'web', b'labels')

    return web.sendtemplate(
        b'summary',
        desc=desc,
        owner=get_contact(web.config) or b'unknown',
        lastchange=tip.date(),
        tags=templateutil.mappinggenerator(tagentries, name=b'tagentry'),
        bookmarks=templateutil.mappinggenerator(bookmarks),
        branches=webutil.branchentries(web.repo, web.stripecount, 10),
        shortlog=templateutil.mappinggenerator(
            changelist, name=b'shortlogentry'
        ),
        node=tip.hex(),
        symrev=b'tip',
        archives=web.archivelist(b'tip'),
        labels=templateutil.hybridlist(labels, name=b'label'),
    )


@webcommand(b'filediff')
def filediff(web):
    """
    /diff/{revision}/{path}
    -----------------------

    Show how a file changed in a particular commit.

    The ``filediff`` template is rendered.

    This handler is registered under both the ``/diff`` and ``/filediff``
    paths. ``/diff`` is used in modern code.
    """
    fctx, ctx = None, None
    try:
        fctx = webutil.filectx(web.repo, web.req)
    except LookupError:
        ctx = webutil.changectx(web.repo, web.req)
        path = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])
        if path not in ctx.files():
            raise

    if fctx is not None:
        path = fctx.path()
        ctx = fctx.changectx()
    basectx = ctx.p1()

    style = web.config(b'web', b'style')
    if b'style' in web.req.qsparams:
        style = web.req.qsparams[b'style']

    diffs = webutil.diffs(web, ctx, basectx, [path], style)
    if fctx is not None:
        rename = webutil.renamelink(fctx)
        ctx = fctx
    else:
        rename = templateutil.mappinglist([])
        ctx = ctx

    return web.sendtemplate(
        b'filediff',
        file=path,
        symrev=webutil.symrevorshortnode(web.req, ctx),
        rename=rename,
        diff=diffs,
        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
    )


diff = webcommand(b'diff')(filediff)


@webcommand(b'comparison')
def comparison(web):
    """
    /comparison/{revision}/{path}
    -----------------------------

    Show a comparison between the old and new versions of a file from changes
    made on a particular revision.

    This is similar to the ``diff`` handler. However, this form features
    a split or side-by-side diff rather than a unified diff.

    The ``context`` query string argument can be used to control the lines of
    context in the diff.

    The ``filecomparison`` template is rendered.
    """
    ctx = webutil.changectx(web.repo, web.req)
    if b'file' not in web.req.qsparams:
        raise ErrorResponse(HTTP_NOT_FOUND, b'file not given')
    path = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])

    parsecontext = lambda v: v == b'full' and -1 or int(v)
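    # b'full' maps to -1 (no limit on context lines); any other value must
    # parse as an integer count of context lines.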
    if b'context' in web.req.qsparams:
        context = parsecontext(web.req.qsparams[b'context'])
    else:
        context = parsecontext(web.config(b'web', b'comparisoncontext'))

    def filelines(f):
        if f.isbinary():
            mt = pycompat.sysbytes(
                mimetypes.guess_type(pycompat.fsdecode(f.path()))[0]
                or r'application/octet-stream'
            )
            return [_(b'(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
        return f.data().splitlines()

    fctx = None
    parent = ctx.p1()
    leftrev = parent.rev()
    leftnode = parent.node()
    rightrev = ctx.rev()
    rightnode = scmutil.binnode(ctx)
    if path in ctx:
        fctx = ctx[path]
        rightlines = filelines(fctx)
        if path not in parent:
            leftlines = ()
        else:
            pfctx = parent[path]
            leftlines = filelines(pfctx)
    else:
        rightlines = ()
        pfctx = ctx.p1()[path]
        leftlines = filelines(pfctx)

    comparison = webutil.compare(context, leftlines, rightlines)
    if fctx is not None:
        rename = webutil.renamelink(fctx)
        ctx = fctx
    else:
        rename = templateutil.mappinglist([])
        ctx = ctx

    return web.sendtemplate(
        b'filecomparison',
        file=path,
        symrev=webutil.symrevorshortnode(web.req, ctx),
        rename=rename,
        leftrev=leftrev,
        leftnode=hex(leftnode),
        rightrev=rightrev,
        rightnode=hex(rightnode),
        comparison=comparison,
        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
    )


@webcommand(b'annotate')
def annotate(web):
    """
    /annotate/{revision}/{path}
    ---------------------------

    Show changeset information for each line in a file.

    The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and
    ``ignoreblanklines`` query string arguments have the same meaning as
    their ``[annotate]`` config equivalents. It uses the hgrc boolean
    parsing logic to interpret the value. e.g. ``0`` and ``false`` are
    false and ``1`` and ``true`` are true. If not defined, the server
    default settings are used.

    The ``fileannotate`` template is rendered.
    """
    fctx = webutil.filectx(web.repo, web.req)
    f = fctx.path()
    parity = paritygen(web.stripecount)
    ishead = fctx.filenode() in fctx.filelog().heads()

    # parents() is called once per line and several lines likely belong to
    # the same revision. So it is worth caching.
    # TODO there are still redundant operations within basefilectx.parents()
    # and from the fctx.annotate() call itself that could be cached.
    parentscache = {}

    def parents(context, f):
        rev = f.rev()
        if rev not in parentscache:
            parentscache[rev] = []
            for p in f.parents():
                entry = {
                    b'node': p.hex(),
                    b'rev': p.rev(),
                }
                parentscache[rev].append(entry)

        for p in parentscache[rev]:
            yield p

    def annotate(context):
        if fctx.isbinary():
            mt = pycompat.sysbytes(
                mimetypes.guess_type(pycompat.fsdecode(fctx.path()))[0]
                or r'application/octet-stream'
            )
            lines = [
                dagop.annotateline(
                    fctx=fctx.filectx(fctx.filerev()),
                    lineno=1,
                    text=b'(binary:%s)' % mt,
                )
            ]
        else:
            lines = webutil.annotate(web.req, fctx, web.repo.ui)

        previousrev = None
        blockparitygen = paritygen(1)
        for lineno, aline in enumerate(lines):
            f = aline.fctx
            rev = f.rev()
            if rev != previousrev:
                blockhead = True
                blockparity = next(blockparitygen)
            else:
                blockhead = None
            previousrev = rev
            yield {
                b"parity": next(parity),
                b"node": f.hex(),
                b"rev": rev,
                b"author": f.user(),
                b"parents": templateutil.mappinggenerator(parents, args=(f,)),
                b"desc": f.description(),
                b"extra": f.extra(),
                b"file": f.path(),
                b"blockhead": blockhead,
                b"blockparity": blockparity,
                b"targetline": aline.lineno,
                b"line": aline.text,
                b"lineno": lineno + 1,
                b"lineid": b"l%d" % (lineno + 1),
                b"linenumber": b"% 6d" % (lineno + 1),
                b"revdate": f.date(),
            }

    diffopts = webutil.difffeatureopts(web.req, web.repo.ui, b'annotate')
    diffopts = {
        k: getattr(diffopts, pycompat.sysstr(k)) for k in diffopts.defaults
    }

    return web.sendtemplate(
        b'fileannotate',
        file=f,
        annotate=templateutil.mappinggenerator(annotate),
        path=webutil.up(f),
        symrev=webutil.symrevorshortnode(web.req, fctx),
        rename=webutil.renamelink(fctx),
        permissions=fctx.manifest().flags(f),
        ishead=int(ishead),
        diffopts=templateutil.hybriddict(diffopts),
        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
    )


@webcommand(b'filelog')
def filelog(web):
    """
    /filelog/{revision}/{path}
    --------------------------

    Show information about the history of a file in the repository.

    The ``revcount`` query string argument can be defined to control the
    maximum number of entries to show.

    The ``filelog`` template will be rendered.
    """

    try:
        fctx = webutil.filectx(web.repo, web.req)
        f = fctx.path()
        fl = fctx.filelog()
    except error.LookupError:
        f = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])
        fl = web.repo.file(f)
        numrevs = len(fl)
        if not numrevs: # file doesn't exist at all
            raise
        rev = webutil.changectx(web.repo, web.req).rev()
        first = fl.linkrev(0)
        if rev < first: # current rev is from before file existed
            raise
        frev = numrevs - 1
        while fl.linkrev(frev) > rev:
            frev -= 1
        fctx = web.repo.filectx(f, fl.linkrev(frev))

    revcount = web.maxshortchanges
    if b'revcount' in web.req.qsparams:
        try:
            revcount = int(web.req.qsparams.get(b'revcount', revcount))
            revcount = max(revcount, 1)
            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
        except ValueError:
            pass

    lrange = webutil.linerange(web.req)
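    # lrange, when set, comes from the 'linerange' query parameter (a
    # 'fromline:toline' pair, e.g. linerange=5:10) and switches the handler
    # into the line-range history mode below.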

    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    lessvars[b'revcount'] = max(revcount // 2, 1)
    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    morevars[b'revcount'] = revcount * 2

    patch = b'patch' in web.req.qsparams
    if patch:
        lessvars[b'patch'] = morevars[b'patch'] = web.req.qsparams[b'patch']
    descend = b'descend' in web.req.qsparams
    if descend:
        lessvars[b'descend'] = morevars[b'descend'] = web.req.qsparams[
            b'descend'
        ]

    count = fctx.filerev() + 1
    start = max(0, count - revcount) # first rev on this page
    end = min(count, start + revcount) # last rev on this page
    parity = paritygen(web.stripecount, offset=start - end)

    repo = web.repo
    filelog = fctx.filelog()
    revs = [
        filerev
        for filerev in filelog.revs(start, end - 1)
        if filelog.linkrev(filerev) in repo
    ]
    entries = []

    diffstyle = web.config(b'web', b'style')
    if b'style' in web.req.qsparams:
        diffstyle = web.req.qsparams[b'style']

    def diff(fctx, linerange=None):
        ctx = fctx.changectx()
        basectx = ctx.p1()
        path = fctx.path()
        return webutil.diffs(
            web,
            ctx,
            basectx,
            [path],
            diffstyle,
            linerange=linerange,
            lineidprefix=b'%s-' % ctx.hex()[:12],
        )

    linerange = None
    if lrange is not None:
        assert lrange is not None # help pytype (!?)
        linerange = webutil.formatlinerange(*lrange)
        # deactivate numeric nav links when linerange is specified as this
        # would require a dedicated "revnav" class
        nav = templateutil.mappinglist([])
        if descend:
            it = dagop.blockdescendants(fctx, *lrange)
        else:
            it = dagop.blockancestors(fctx, *lrange)
        for i, (c, lr) in enumerate(it, 1):
            diffs = None
            if patch:
                diffs = diff(c, linerange=lr)
            # follow renames across filtered (not in range) revisions
            path = c.path()
            lm = webutil.commonentry(repo, c)
            lm.update(
                {
                    b'parity': next(parity),
                    b'filerev': c.rev(),
                    b'file': path,
                    b'diff': diffs,
                    b'linerange': webutil.formatlinerange(*lr),
                    b'rename': templateutil.mappinglist([]),
                }
            )
            entries.append(lm)
            if i == revcount:
                break
        lessvars[b'linerange'] = webutil.formatlinerange(*lrange)
        morevars[b'linerange'] = lessvars[b'linerange']
    else:
        for i in revs:
            iterfctx = fctx.filectx(i)
            diffs = None
            if patch:
                diffs = diff(iterfctx)
            lm = webutil.commonentry(repo, iterfctx)
            lm.update(
                {
                    b'parity': next(parity),
                    b'filerev': i,
                    b'file': f,
                    b'diff': diffs,
                    b'rename': webutil.renamelink(iterfctx),
                }
            )
            entries.append(lm)
        entries.reverse()
        revnav = webutil.filerevnav(web.repo, fctx.path())
        nav = revnav.gen(end - 1, revcount, count)

    latestentry = entries[:1]

    return web.sendtemplate(
        b'filelog',
        file=f,
        nav=nav,
        symrev=webutil.symrevorshortnode(web.req, fctx),
        entries=templateutil.mappinglist(entries),
        descend=descend,
        patch=patch,
        latestentry=templateutil.mappinglist(latestentry),
        linerange=linerange,
        revcount=revcount,
        morevars=morevars,
        lessvars=lessvars,
        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
    )


@webcommand(b'archive')
def archive(web):
    """
    /archive/{revision}.{format}[/{path}]
    -------------------------------------

    Obtain an archive of repository content.

    The content and type of the archive are defined by a URL path parameter.
    ``format`` is the file extension of the archive type to be generated. e.g.
    ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your
    server configuration.

    The optional ``path`` URL parameter controls content to include in the
    archive. If omitted, every file in the specified revision is present in the
    archive. If included, only the specified file or contents of the specified
    directory will be included in the archive.

    No template is used for this handler. Raw, binary content is generated.
    """

    type_ = web.req.qsparams.get(b'type')
    allowed = web.configlist(b"web", b"allow-archive")
    key = web.req.qsparams[b'node']

    if type_ not in webutil.archivespecs:
        msg = b'Unsupported archive type: %s' % stringutil.pprint(type_)
        raise ErrorResponse(HTTP_NOT_FOUND, msg)

    if not ((type_ in allowed or web.configbool(b"web", b"allow" + type_))):
        msg = b'Archive type not allowed: %s' % type_
        raise ErrorResponse(HTTP_FORBIDDEN, msg)
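    # An archive type is allowed either via the list form
    # ("[web] allow-archive = gz zip") or the legacy per-type boolean
    # ("[web] allowzip = True") probed by the b"allow" + type_ lookup above.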

    reponame = re.sub(br"\W+", b"-", os.path.basename(web.reponame))
    cnode = web.repo.lookup(key)
    arch_version = key
    if cnode == key or key == b'tip':
        arch_version = short(cnode)
    name = b"%s-%s" % (reponame, arch_version)

    ctx = webutil.changectx(web.repo, web.req)
    match = scmutil.match(ctx, [])
    file = web.req.qsparams.get(b'file')
    if file:
        pats = [b'path:' + file]
        match = scmutil.match(ctx, pats, default=b'path')
        if pats:
            files = [f for f in ctx.manifest().keys() if match(f)]
            if not files:
                raise ErrorResponse(
                    HTTP_NOT_FOUND, b'file(s) not found: %s' % file
                )

    mimetype, artype, extension, encoding = webutil.archivespecs[type_]

    web.res.headers[b'Content-Type'] = mimetype
    web.res.headers[b'Content-Disposition'] = b'attachment; filename=%s%s' % (
        name,
        extension,
    )

    if encoding:
        web.res.headers[b'Content-Encoding'] = encoding

    web.res.setbodywillwrite()
    if list(web.res.sendresponse()):
        raise error.ProgrammingError(
            b'sendresponse() should not emit data if writing later'
        )

    if web.req.method == b'HEAD':
        return []

    bodyfh = web.res.getbodyfile()

    archival.archive(
        web.repo,
        bodyfh,
        cnode,
        artype,
        prefix=name,
        match=match,
        subrepos=web.configbool(b"web", b"archivesubrepos"),
    )

    return []


@webcommand(b'static')
def static(web):
    fname = web.req.qsparams[b'file']
    # a repo owner may set web.static in .hg/hgrc to get any file
    # readable by the user running the CGI script
    static = web.config(b"web", b"static", untrusted=False)
    staticfile(web.templatepath, static, fname, web.res)
    return web.res.sendresponse()


@webcommand(b'graph')
def graph(web):
    """
    /graph[/{revision}]
    -------------------

    Show information about the graphical topology of the repository.

    Information rendered by this handler can be used to create visual
    representations of repository topology.

    The ``revision`` URL parameter controls the starting changeset. If it's
    absent, the default is ``tip``.

    The ``revcount`` query string argument can define the number of changesets
    to show information for.

    The ``graphtop`` query string argument can specify the starting changeset
    for producing the ``jsdata`` variable that is used for rendering the
    graph in JavaScript. By default it has the same value as ``revision``.

    This handler will render the ``graph`` template.
    """

    if b'node' in web.req.qsparams:
        ctx = webutil.changectx(web.repo, web.req)
        symrev = webutil.symrevorshortnode(web.req, ctx)
    else:
        ctx = web.repo[b'tip']
        symrev = b'tip'
    rev = ctx.rev()

    bg_height = 39
    revcount = web.maxshortchanges
    if b'revcount' in web.req.qsparams:
        try:
            revcount = int(web.req.qsparams.get(b'revcount', revcount))
            revcount = max(revcount, 1)
            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
        except ValueError:
            pass

    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    lessvars[b'revcount'] = max(revcount // 2, 1)
    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    morevars[b'revcount'] = revcount * 2

    graphtop = web.req.qsparams.get(b'graphtop', ctx.hex())
    graphvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
    graphvars[b'graphtop'] = graphtop

    count = len(web.repo)
    pos = rev

    uprev = min(max(0, count - 1), rev + revcount)
    downrev = max(0, rev - revcount)
    changenav = webutil.revnav(web.repo).gen(pos, revcount, count)

    tree = []
    nextentry = []
    lastrev = 0
    if pos != -1:
        allrevs = web.repo.changelog.revs(pos, 0)
        revs = []
        for i in allrevs:
            revs.append(i)
            if len(revs) >= revcount + 1:
                break

        if len(revs) > revcount:
            nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])]
            revs = revs[:-1]

        lastrev = revs[-1]
1403
1403
1404 # We have to feed a baseset to dagwalker as it is expecting smartset
1404 # We have to feed a baseset to dagwalker as it is expecting smartset
1405 # object. This does not have a big impact on hgweb performance itself
1405 # object. This does not have a big impact on hgweb performance itself
1406 # since hgweb graphing code is not itself lazy yet.
1406 # since hgweb graphing code is not itself lazy yet.
1407 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1407 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1408 # As we said one line above... not lazy.
1408 # As we said one line above... not lazy.
1409 tree = list(
1409 tree = list(
1410 item
1410 item
1411 for item in graphmod.colored(dag, web.repo)
1411 for item in graphmod.colored(dag, web.repo)
1412 if item[1] == graphmod.CHANGESET
1412 if item[1] == graphmod.CHANGESET
1413 )
1413 )
1414
1414
1415 def fulltree():
1415 def fulltree():
1416 pos = web.repo[graphtop].rev()
1416 pos = web.repo[graphtop].rev()
1417 tree = []
1417 tree = []
1418 if pos != -1:
1418 if pos != -1:
1419 revs = web.repo.changelog.revs(pos, lastrev)
1419 revs = web.repo.changelog.revs(pos, lastrev)
1420 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1420 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1421 tree = list(
1421 tree = list(
1422 item
1422 item
1423 for item in graphmod.colored(dag, web.repo)
1423 for item in graphmod.colored(dag, web.repo)
1424 if item[1] == graphmod.CHANGESET
1424 if item[1] == graphmod.CHANGESET
1425 )
1425 )
1426 return tree
1426 return tree
1427
1427
1428 def jsdata(context):
1428 def jsdata(context):
1429 for id, type, ctx, vtx, edges in fulltree():
1429 for id, type, ctx, vtx, edges in fulltree():
1430 yield {
1430 yield {
1431 b'node': pycompat.bytestr(ctx),
1431 b'node': pycompat.bytestr(ctx),
1432 b'graphnode': webutil.getgraphnode(web.repo, ctx),
1432 b'graphnode': webutil.getgraphnode(web.repo, ctx),
1433 b'vertex': vtx,
1433 b'vertex': vtx,
1434 b'edges': edges,
1434 b'edges': edges,
1435 }
1435 }
1436
1436
1437 def nodes(context):
1437 def nodes(context):
1438 parity = paritygen(web.stripecount)
1438 parity = paritygen(web.stripecount)
1439 for row, (id, type, ctx, vtx, edges) in enumerate(tree):
1439 for row, (id, type, ctx, vtx, edges) in enumerate(tree):
1440 entry = webutil.commonentry(web.repo, ctx)
1440 entry = webutil.commonentry(web.repo, ctx)
1441 edgedata = [
1441 edgedata = [
1442 {
1442 {
1443 b'col': edge[0],
1443 b'col': edge[0],
1444 b'nextcol': edge[1],
1444 b'nextcol': edge[1],
1445 b'color': (edge[2] - 1) % 6 + 1,
1445 b'color': (edge[2] - 1) % 6 + 1,
1446 b'width': edge[3],
1446 b'width': edge[3],
1447 b'bcolor': edge[4],
1447 b'bcolor': edge[4],
1448 }
1448 }
1449 for edge in edges
1449 for edge in edges
1450 ]
1450 ]
1451
1451
1452 entry.update(
1452 entry.update(
1453 {
1453 {
1454 b'col': vtx[0],
1454 b'col': vtx[0],
1455 b'color': (vtx[1] - 1) % 6 + 1,
1455 b'color': (vtx[1] - 1) % 6 + 1,
1456 b'parity': next(parity),
1456 b'parity': next(parity),
1457 b'edges': templateutil.mappinglist(edgedata),
1457 b'edges': templateutil.mappinglist(edgedata),
1458 b'row': row,
1458 b'row': row,
1459 b'nextrow': row + 1,
1459 b'nextrow': row + 1,
1460 }
1460 }
1461 )
1461 )
1462
1462
1463 yield entry
1463 yield entry
1464
1464
1465 rows = len(tree)
1465 rows = len(tree)
1466
1466
1467 return web.sendtemplate(
1467 return web.sendtemplate(
1468 b'graph',
1468 b'graph',
1469 rev=rev,
1469 rev=rev,
1470 symrev=symrev,
1470 symrev=symrev,
1471 revcount=revcount,
1471 revcount=revcount,
1472 uprev=uprev,
1472 uprev=uprev,
1473 lessvars=lessvars,
1473 lessvars=lessvars,
1474 morevars=morevars,
1474 morevars=morevars,
1475 downrev=downrev,
1475 downrev=downrev,
1476 graphvars=graphvars,
1476 graphvars=graphvars,
1477 rows=rows,
1477 rows=rows,
1478 bg_height=bg_height,
1478 bg_height=bg_height,
1479 changesets=count,
1479 changesets=count,
1480 nextentry=templateutil.mappinglist(nextentry),
1480 nextentry=templateutil.mappinglist(nextentry),
1481 jsdata=templateutil.mappinggenerator(jsdata),
1481 jsdata=templateutil.mappinggenerator(jsdata),
1482 nodes=templateutil.mappinggenerator(nodes),
1482 nodes=templateutil.mappinggenerator(nodes),
1483 node=ctx.hex(),
1483 node=ctx.hex(),
1484 archives=web.archivelist(b'tip'),
1484 archives=web.archivelist(b'tip'),
1485 changenav=changenav,
1485 changenav=changenav,
1486 )
1486 )
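
# Illustrative requests for the handler above (revision values are
# hypothetical):
#
#   /graph                     -> graph starting at tip, default revcount
#   /graph/stable?revcount=60  -> 60 changesets starting at branch 'stable'
#   /graph/tip?graphtop=<node> -> jsdata computed from <node> downward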


def _getdoc(e):
    doc = e[0].__doc__
    if doc:
        doc = _(doc).partition(b'\n')[0]
    else:
        doc = _(b'(no help text available)')
    return doc


@webcommand(b'help')
def help(web):
    """
    /help[/{topic}]
    ---------------

    Render help documentation.

    This web command is roughly equivalent to :hg:`help`. If a ``topic``
    is defined, that help topic will be rendered. If not, an index of
    available help topics will be rendered.

    The ``help`` template will be rendered when requesting help for a topic.
    ``helptopics`` will be rendered for the index of help topics.
    """
    from .. import commands, help as helpmod  # avoid cycle

    topicname = web.req.qsparams.get(b'node')
    if not topicname:

        def topics(context):
            for h in helpmod.helptable:
                entries, summary, _doc = h[0:3]
                yield {b'topic': entries[0], b'summary': summary}

        early, other = [], []
        primary = lambda s: s.partition(b'|')[0]
        for c, e in commands.table.items():
            doc = _getdoc(e)
            if b'DEPRECATED' in doc or c.startswith(b'debug'):
                continue
            cmd = primary(c)
            if getattr(e[0], 'helpbasic', False):
                early.append((cmd, doc))
            else:
                other.append((cmd, doc))

        early.sort()
        other.sort()

        def earlycommands(context):
            for c, doc in early:
                yield {b'topic': c, b'summary': doc}

        def othercommands(context):
            for c, doc in other:
                yield {b'topic': c, b'summary': doc}

        return web.sendtemplate(
            b'helptopics',
            topics=templateutil.mappinggenerator(topics),
            earlycommands=templateutil.mappinggenerator(earlycommands),
            othercommands=templateutil.mappinggenerator(othercommands),
            title=b'Index',
        )

    # Render an index of sub-topics.
    if topicname in helpmod.subtopics:
        topics = []
        for entries, summary, _doc in helpmod.subtopics[topicname]:
            topics.append(
                {
                    b'topic': b'%s.%s' % (topicname, entries[0]),
                    b'basename': entries[0],
                    b'summary': summary,
                }
            )

        return web.sendtemplate(
            b'helptopics',
            topics=templateutil.mappinglist(topics),
            title=topicname,
            subindex=True,
        )

    u = webutil.wsgiui.load()
    u.verbose = True

    # Render a page from a sub-topic.
    if b'.' in topicname:
        # TODO implement support for rendering sections, like
        # `hg help` works.
        topic, subtopic = topicname.split(b'.', 1)
        if topic not in helpmod.subtopics:
            raise ErrorResponse(HTTP_NOT_FOUND)
    else:
        topic = topicname
        subtopic = None

    try:
        doc = helpmod.help_(u, commands, topic, subtopic=subtopic)
    except error.Abort:
        raise ErrorResponse(HTTP_NOT_FOUND)

    return web.sendtemplate(b'help', topic=topicname, doc=doc)
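
# Illustrative requests for the handler above (topic names are examples of
# standard Mercurial help topics):
#
#   /help                        -> index of commands and topics
#   /help/revsets                -> a single help topic
#   /help/internals.changegroups -> a sub-topic page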


# tell hggettext to extract docstrings from these functions:
i18nfunctions = commands.values()
@@ -1,692 +1,692
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see
# <http://www.gnu.org/licenses/>.

# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko

# Modified by Benoit Boissinot:
#  - fix for digest auth (inspired from urllib2.py @ Python v2.4)
# Modified by Dirkjan Ochtman:
#  - import md5 function from a local util module
# Modified by Augie Fackler:
#  - add safesend method and use it to prevent broken pipe errors
#    on large POST requests

"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.

>>> import urllib2
>>> from keepalive import HTTPHandler
>>> keepalive_handler = HTTPHandler()
>>> opener = urlreq.buildopener(keepalive_handler)
>>> urlreq.installopener(opener)
>>>
>>> fo = urlreq.urlopen('http://www.python.org')

If a connection to a given host is requested, and all of the existing
connections are still in use, another connection will be opened. If
the handler tries to use an existing connection but it fails in some
way, it will be closed and removed from the pool.

To remove the handler, simply re-run build_opener with no arguments, and
install that opener.

You can explicitly close connections by using the close_connection()
method of the returned file-like object (described below) or you can
use the handler methods:

  close_connection(host)
  close_all()
  open_connections()

NOTE: using the close_connection and close_all methods of the handler
should be done with care when using multiple threads.
  * there is nothing that prevents another thread from creating new
    connections immediately after connections are closed
  * no checks are done to prevent in-use connections from being closed

>>> keepalive_handler.close_all()

EXTRA ATTRIBUTES AND METHODS

Upon a status of 200, the object returned has a few additional
attributes and methods, which should not be used if you want to
remain consistent with the normal urllib2-returned objects:

  close_connection() - close the connection to the host
  readlines()        - you know, readlines()
  status             - the return status (i.e. 404)
  reason             - english translation of status (i.e. 'File not found')

If you want the best of both worlds, use this inside an
AttributeError-catching try:

>>> try: status = fo.status
>>> except AttributeError: status = None

Unfortunately, these are ONLY there if status == 200, so it's not
easy to distinguish between non-200 responses. The reason is that
urllib2 tries to do clever things with error codes 301, 302, 401,
and 407, and it wraps the object upon return.
"""

# $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $


import collections
import hashlib
import socket
import sys
import threading

from .i18n import _
from .node import hex
from . import (
    pycompat,
    urllibcompat,
    util,
)
from .utils import procutil

httplib = util.httplib
urlerr = util.urlerr
urlreq = util.urlreq

DEBUG = None


class ConnectionManager:
    """
    The connection manager must be able to:
      * keep track of all existing
    """

    def __init__(self):
        self._lock = threading.Lock()
        self._hostmap = collections.defaultdict(list)  # host -> [connection]
        self._connmap = {}  # map connections to host
        self._readymap = {}  # map connection to ready state

    def add(self, host, connection, ready):
        self._lock.acquire()
        try:
            self._hostmap[host].append(connection)
            self._connmap[connection] = host
            self._readymap[connection] = ready
        finally:
            self._lock.release()

    def remove(self, connection):
        self._lock.acquire()
        try:
            try:
                host = self._connmap[connection]
            except KeyError:
                pass
            else:
                del self._connmap[connection]
                del self._readymap[connection]
                self._hostmap[host].remove(connection)
                if not self._hostmap[host]:
                    del self._hostmap[host]
        finally:
            self._lock.release()

    def set_ready(self, connection, ready):
        try:
            self._readymap[connection] = ready
        except KeyError:
            pass

    def get_ready_conn(self, host):
        conn = None
        self._lock.acquire()
        try:
            for c in self._hostmap[host]:
                if self._readymap[c]:
                    self._readymap[c] = False
                    conn = c
                    break
        finally:
            self._lock.release()
        return conn

    def get_all(self, host=None):
        if host:
            return list(self._hostmap[host])
        else:
            return dict(
                {h: list(conns) for (h, conns) in self._hostmap.items()}
            )
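

# A minimal sketch (not part of the module) of the life cycle managed by the
# class above; _FakeConn and the host string are hypothetical stand-ins.
def _connmanager_demo():
    class _FakeConn:
        def close(self):
            pass

    cm = ConnectionManager()
    conn = _FakeConn()
    cm.add('example.com:80', conn, ready=True)
    # Checking a connection out marks it busy until set_ready() hands it back.
    assert cm.get_ready_conn('example.com:80') is conn
    assert cm.get_ready_conn('example.com:80') is None
    cm.set_ready(conn, True)
    cm.remove(conn)
    assert cm.get_all('example.com:80') == []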


class KeepAliveHandler:
    def __init__(self, timeout=None):
        self._cm = ConnectionManager()
        self._timeout = timeout
        self.requestscount = 0
        self.sentbytescount = 0

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each. [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for h in self._cm.get_all(host):
            self._cm.remove(h)
            h.close()

    def close_all(self):
        """close all open connections"""
        for host, conns in self._cm.get_all().items():
            for h in conns:
                self._cm.remove(h)
                h.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and that the
        connection is ready for another request"""
        self._cm.set_ready(connection, True)

    def _remove_connection(self, host, connection, close=0):
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        host = urllibcompat.gethost(req)
        if not host:
            raise urlerr.urlerror(b'no host given')

        try:
            h = self._cm.get_ready_conn(host)
            while h:
                r = self._reuse_connection(h, req, host)

                # if this response is non-None, then it worked and we're
                # done. Break out, skipping the else block.
                if r:
                    break

                # connection is bad - possibly closed by server
                # discard it and ask for the next free connection
                h.close()
                self._cm.remove(h)
                h = self._cm.get_ready_conn(host)
            else:
                # no (working) free connections were found. Create a new one.
                h = http_class(host, timeout=self._timeout)
                if DEBUG:
                    DEBUG.info(
                        b"creating new connection to %s (%d)", host, id(h)
                    )
                self._cm.add(host, h, False)
                self._start_transaction(h, req)
                r = h.getresponse()
        # The string form of BadStatusLine is the status line. Add some context
        # to make the error message slightly more useful.
        except httplib.BadStatusLine as err:
            raise urlerr.urlerror(
                _(b'bad HTTP status line: %s') % pycompat.sysbytes(err.line)
            )
        except (socket.error, httplib.HTTPException) as err:
            raise urlerr.urlerror(err)

        # If not a persistent connection, don't try to reuse it. Look
        # for this using getattr() since vcr doesn't define this
        # attribute, and in that case always close the connection.
        if getattr(r, 'will_close', True):
            self._cm.remove(h)

        if DEBUG:
            DEBUG.info(b"STATUS: %s, %s", r.status, r.reason)
        r._handler = self
        r._host = host
        r._url = req.get_full_url()
        r._connection = h
        r.code = r.status
        r.headers = r.msg
        r.msg = r.reason

        return r

    def _reuse_connection(self, h, req, host):
        """start the transaction with a re-used connection
        return a response object (r) upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns. However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(h, req)
            r = h.getresponse()
            # note: just because we got something back doesn't mean it
            # worked. We'll check the version below, too.
        except (socket.error, httplib.HTTPException):
            r = None
        except:  # re-raises
            # adding this block just in case we've missed
            # something we will still raise the exception, but
            # lets try and close the connection and remove it
            # first. We previously got into a nasty loop
            # where an exception was uncaught, and so the
            # connection stayed open. On the next try, the
            # same exception was raised, etc. The trade-off is
            # that it's now possible this call will raise
            # a DIFFERENT exception
            if DEBUG:
                DEBUG.error(
                    b"unexpected exception - closing connection to %s (%d)",
                    host,
                    id(h),
                )
            self._cm.remove(h)
            h.close()
            raise

        if r is None or r.version == 9:
            # httplib falls back to assuming HTTP 0.9 if it gets a
            # bad header back. This is most likely to happen if
            # the socket has been closed by the server since we
            # last used the connection.
            if DEBUG:
                DEBUG.info(
                    b"failed to re-use connection to %s (%d)", host, id(h)
                )
            r = None
        else:
            if DEBUG:
                DEBUG.info(b"re-using connection to %s (%d)", host, id(h))

        return r

    def _start_transaction(self, h, req):
        oldbytescount = getattr(h, 'sentbytescount', 0)

        # What follows mostly reimplements HTTPConnection.request()
        # except it adds self.parent.addheaders in the mix and sends headers
        # in a deterministic order (to make testing easier).
        headers = util.sortdict(self.parent.addheaders)
        headers.update(sorted(req.headers.items()))
        headers.update(sorted(req.unredirected_hdrs.items()))
        headers = util.sortdict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            if n in headers:
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if urllibcompat.hasdata(req):
                data = urllibcompat.getdata(req)
                h.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
                if 'content-type' not in headers:
                    h.putheader(
                        'Content-type', 'application/x-www-form-urlencoded'
                    )
                if 'content-length' not in headers:
                    h.putheader('Content-length', '%d' % len(data))
            else:
                h.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
        except socket.error as err:
            raise urlerr.urlerror(err)
        for k, v in headers.items():
            h.putheader(k, v)
        h.endheaders()
        if urllibcompat.hasdata(req):
            h.send(data)

        # This will fail to record events in case of I/O failure. That's OK.
        self.requestscount += 1
        self.sentbytescount += getattr(h, 'sentbytescount', 0) - oldbytescount

        try:
            self.parent.requestscount += 1
            self.parent.sentbytescount += (
                getattr(h, 'sentbytescount', 0) - oldbytescount
            )
        except AttributeError:
            pass


class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
    pass


class HTTPResponse(httplib.HTTPResponse):
    # we need to subclass HTTPResponse in order to
    # 1) add close_connection() method
    # 2) add geturl() method
    # 3) add accounting for read(), readlines() and readinto()

    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        httplib.HTTPResponse.__init__(
            self, sock, debuglevel=debuglevel, method=method
        )
        self.fileno = sock.fileno
        self.code = None
        self.receivedbytescount = 0
        self._rbuf = b''
        self._rbufsize = 8096
        self._handler = None  # inserted by the handler later
        self._host = None  # (same)
        self._url = None  # (same)
        self._connection = None  # (same)

    # Python 2.7 has a single close() which closes the socket handle.
    # This method was effectively renamed to _close_conn() in Python 3. But
    # there is also a close(). _close_conn() is called by methods like
    # read().

    def close(self):
        if self.fp:
            self.fp.close()
            self.fp = None
            if self._handler:
                self._handler._request_closed(
                    self, self._host, self._connection
                )

    def _close_conn(self):
        self.close()

    def close_connection(self):
        self._handler._remove_connection(self._host, self._connection, close=1)
        self.close()

    def geturl(self):
        return self._url

    def read(self, amt=None):
        data = super().read(amt)
        self.receivedbytescount += len(data)
        if self._connection is not None:
            self._connection.receivedbytescount += len(data)
        if self._handler is not None:
            self._handler.parent.receivedbytescount += len(data)
        return data

    def readline(self):
        data = super().readline()
        self.receivedbytescount += len(data)
        if self._connection is not None:
            self._connection.receivedbytescount += len(data)
        if self._handler is not None:
            self._handler.parent.receivedbytescount += len(data)
        return data

    def readinto(self, dest):
        got = super().readinto(dest)
        self.receivedbytescount += got
        if self._connection is not None:
            self._connection.receivedbytescount += got
        if self._handler is not None:
            self._handler.parent.receivedbytescount += got
        return got


def safesend(self, str):
    """Send `str' to the server.

    Shamelessly ripped off from httplib to patch a bad behavior.
    """
    # _broken_pipe_resp is an attribute we set in this function
    # if the socket is closed while we're sending data but
    # the server sent us a response before hanging up.
    # In that case, we want to pretend to send the rest of the
    # outgoing data, and then let the user use getresponse()
    # (which we wrap) to get this last response before
    # opening a new socket.
    if getattr(self, '_broken_pipe_resp', None) is not None:
        return

    if self.sock is None:
        if self.auto_open:
            self.connect()
        else:
            raise httplib.NotConnected

    # send the data to the server. if we get a broken pipe, then close
    # the socket. we want to reconnect when somebody tries to send again.
    #
    # NOTE: we DO propagate the error, though, because we cannot simply
    # ignore the error... the caller will know if they can retry.
    if self.debuglevel > 0:
        print(b"send:", repr(str))
    try:
        blocksize = 8192
        read = getattr(str, 'read', None)
        if read is not None:
            if self.debuglevel > 0:
                print(b"sending a read()able")
            data = read(blocksize)
            while data:
                self.sock.sendall(data)
                self.sentbytescount += len(data)
                data = read(blocksize)
        else:
            self.sock.sendall(str)
            self.sentbytescount += len(str)
    except BrokenPipeError:
        if self._HTTPConnection__state == httplib._CS_REQ_SENT:
            self._broken_pipe_resp = None
            self._broken_pipe_resp = self.getresponse()
            reraise = False
        else:
            reraise = True
        self.close()
        if reraise:
            raise


def wrapgetresponse(cls):
    """Wraps getresponse in cls with a broken-pipe sane version."""

    def safegetresponse(self):
        # In safesend() we might set the _broken_pipe_resp
        # attribute, in which case the socket has already
        # been closed and we just need to give them the response
        # back. Otherwise, we use the normal response path.
        r = getattr(self, '_broken_pipe_resp', None)
        if r is not None:
            return r
        return cls.getresponse(self)

    safegetresponse.__doc__ = cls.getresponse.__doc__
    return safegetresponse
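
# Hypothetical driver (not part of the module) illustrating the contract of
# safesend()/safegetresponse() above: when the server replies and hangs up
# mid-POST, send() stashes that early response and getresponse() replays it
# on the HTTPConnection class defined just below.
#
#   conn = HTTPConnection('example.com')
#   conn.request('POST', '/upload', body=payload)  # may hit BrokenPipeError
#   resp = conn.getresponse()  # the stashed early response, if one arrived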


class HTTPConnection(httplib.HTTPConnection):
    # url.httpsconnection inherits from this. So when adding/removing
    # attributes, be sure to audit httpsconnection() for unintended
    # consequences.

    # use the modified response class
    response_class = HTTPResponse
    send = safesend
    getresponse = wrapgetresponse(httplib.HTTPConnection)

    def __init__(self, *args, **kwargs):
        httplib.HTTPConnection.__init__(self, *args, **kwargs)
        self.sentbytescount = 0
        self.receivedbytescount = 0

    def __repr__(self):
        base = super(HTTPConnection, self).__repr__()
        local = "(unconnected)"
        s = self.sock
        if s:
            try:
                local = "%s:%d" % s.getsockname()
            except OSError:
                pass  # Likely not connected
        return "<%s: %s <--> %s:%d>" % (base, local, self.host, self.port)
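

# A minimal sketch (assumed usage, mirroring the module docstring) wiring the
# keepalive handler into a urllib opener and reading back the request/byte
# accounting maintained by the classes above; the URL is a placeholder.
def _keepalive_demo(url='http://example.com/'):
    handler = HTTPHandler(timeout=30)
    opener = urlreq.buildopener(handler)
    fo = opener.open(url)
    try:
        data = fo.read()
    finally:
        fo.close()
    return handler.requestscount, handler.sentbytescount, len(data)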


#########################################################################
##### TEST FUNCTIONS
#########################################################################


def continuity(url):
    md5 = hashlib.md5
    format = b'%25s: %s'

    # first fetch the file with the normal http handler
    opener = urlreq.buildopener()
    urlreq.installopener(opener)
    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5(foo)
    print(format % (b'normal urllib', hex(m.digest())))

    # now install the keepalive handler and try again
    opener = urlreq.buildopener(HTTPHandler())
    urlreq.installopener(opener)

    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    m = md5(foo)
    print(format % (b'keepalive read', hex(m.digest())))

    fo = urlreq.urlopen(url)
    foo = b''
    while True:
        f = fo.readline()
        if f:
            foo = foo + f
        else:
            break
    fo.close()
    m = md5(foo)
    print(format % (b'keepalive readline', hex(m.digest())))


def comp(N, url):
    print(b' making %i connections to:\n %s' % (N, url))

    procutil.stdout.write(b' first using the normal urllib handlers')
    # first use normal opener
    opener = urlreq.buildopener()
    urlreq.installopener(opener)
    t1 = fetch(N, url)
    print(b' TIME: %.3f s' % t1)

    procutil.stdout.write(b' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urlreq.buildopener(HTTPHandler())
    urlreq.installopener(opener)
    t2 = fetch(N, url)
    print(b' TIME: %.3f s' % t2)
    print(b' improvement factor: %.2f' % (t1 / t2))


def fetch(N, url, delay=0):
    import time

    lens = []
    starttime = time.time()
    for i in range(N):
        if delay and i > 0:
            time.sleep(delay)
        fo = urlreq.urlopen(url)
        foo = fo.read()
        fo.close()
        lens.append(len(foo))
    diff = time.time() - starttime

    j = 0
    for i in lens[1:]:
        j = j + 1
        if not i == lens[0]:
            print(b"WARNING: inconsistent length on read %i: %i" % (j, i))

    return diff


def test_timeout(url):
    # 'time' is only imported at module scope under __main__ below; import it
    # here so this helper also works when called as a library function.
    import time

    global DEBUG
    dbbackup = DEBUG

    class FakeLogger:
        def debug(self, msg, *args):
            print(msg % args)

        info = warning = error = debug

    DEBUG = FakeLogger()
    print(b" fetching the file to establish a connection")
    fo = urlreq.urlopen(url)
    data1 = fo.read()
    fo.close()

    i = 20
    print(b" waiting %i seconds for the server to close the connection" % i)
    while i > 0:
        procutil.stdout.write(b'\r %2i' % i)
        procutil.stdout.flush()
        time.sleep(1)
        i -= 1
    procutil.stderr.write(b'\r')

    print(b" fetching the file a second time")
    fo = urlreq.urlopen(url)
    data2 = fo.read()
    fo.close()

    if data1 == data2:
        print(b' data are identical')
    else:
        print(b' ERROR: DATA DIFFER')

    DEBUG = dbbackup


def test(url, N=10):
    print(b"performing continuity test (making sure stuff isn't corrupted)")
    continuity(url)
    print(b'')
    print(b"performing speed comparison")
    comp(N, url)
    print(b'')
    print(b"performing dropped-connection check")
    test_timeout(url)


if __name__ == '__main__':
    import time

    try:
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        print("%s <integer> <url>" % sys.argv[0])
    else:
        test(url, N)
@@ -1,135 +1,135
import _lsprof
import sys

Profiler = _lsprof.Profiler

# PyPy doesn't expose profiler_entry from the module.
profiler_entry = getattr(_lsprof, 'profiler_entry', None)

__all__ = ['profile', 'Stats']


def profile(f, *args, **kwds):
    """XXX docstring"""
    p = Profiler()
    p.enable(subcalls=True, builtins=True)
    try:
        f(*args, **kwds)
    finally:
        p.disable()
    return Stats(p.getstats())


class Stats:
    """XXX docstring"""

    def __init__(self, data):
        self.data = data

    def sort(self, crit="inlinetime"):
        """XXX docstring"""
        # profiler_entries isn't defined when running under PyPy.
        if profiler_entry:
            if crit not in profiler_entry.__dict__:
                raise ValueError("Can't sort by %s" % crit)
        elif self.data and not getattr(self.data[0], crit, None):
            raise ValueError("Can't sort by %s" % crit)

        self.data.sort(key=lambda x: getattr(x, crit), reverse=True)
        for e in self.data:
            if e.calls:
                e.calls.sort(key=lambda x: getattr(x, crit), reverse=True)

    def pprint(self, top=None, file=None, limit=None, climit=None):
        """XXX docstring"""
        if file is None:
            file = sys.stdout
        d = self.data
        if top is not None:
            d = d[:top]
        cols = b"% 12d %12d %11.4f %11.4f %s\n"
        hcols = b"% 12s %12s %12s %12s %s\n"
        file.write(
            hcols
            % (
                b"CallCount",
                b"Recursive",
                b"Total(s)",
                b"Inline(s)",
                b"module:lineno(function)",
            )
        )
        count = 0
        for e in d:
            file.write(
                cols
                % (
                    e.callcount,
                    e.reccallcount,
                    e.totaltime,
                    e.inlinetime,
                    label(e.code),
                )
            )
            count += 1
            if limit is not None and count == limit:
                return
            ccount = 0
            if climit and e.calls:
                for se in e.calls:
                    file.write(
                        cols
                        % (
                            se.callcount,
                            se.reccallcount,
                            se.totaltime,
                            se.inlinetime,
                            b" %s" % label(se.code),
                        )
                    )
                    count += 1
                    ccount += 1
                    if limit is not None and count == limit:
                        return
                    if climit is not None and ccount == climit:
                        break

    def freeze(self):
        """Replace all references to code objects with string
        descriptions; this makes it possible to pickle the instance."""

        # this code is probably rather ickier than it needs to be!
        for i in range(len(self.data)):
            e = self.data[i]
            if not isinstance(e.code, str):
                self.data[i] = type(e)((label(e.code),) + e[1:])
            if e.calls:
                for j in range(len(e.calls)):
                    se = e.calls[j]
                    if not isinstance(se.code, str):
                        e.calls[j] = type(se)((label(se.code),) + se[1:])
111
111
112
112
113 _fn2mod = {}
113 _fn2mod = {}
114
114
115
115
116 def label(code):
116 def label(code):
117 if isinstance(code, str):
117 if isinstance(code, str):
118 return code.encode('latin-1')
118 return code.encode('latin-1')
119 try:
119 try:
120 mname = _fn2mod[code.co_filename]
120 mname = _fn2mod[code.co_filename]
121 except KeyError:
121 except KeyError:
122 for k, v in list(sys.modules.items()):
122 for k, v in list(sys.modules.items()):
123 if v is None:
123 if v is None:
124 continue
124 continue
125 if not isinstance(getattr(v, '__file__', None), str):
125 if not isinstance(getattr(v, '__file__', None), str):
126 continue
126 continue
127 if v.__file__.startswith(code.co_filename):
127 if v.__file__.startswith(code.co_filename):
128 mname = _fn2mod[code.co_filename] = k
128 mname = _fn2mod[code.co_filename] = k
129 break
129 break
130 else:
130 else:
131 mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename
131 mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename
132
132
133 res = '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
133 res = '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
134
134
135 return res.encode('latin-1')
135 return res.encode('latin-1')
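# A short usage sketch for the profiler wrapper above; ``fib`` is a stand-in
# workload, and pprint() gets a binary stream because cols/hcols are bytes
# templates:
#
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#     stats = profile(fib, 25)          # run fib(25) under _lsprof.Profiler
#     stats.sort('inlinetime')          # hottest own-time entries first
#     stats.pprint(top=5, file=sys.stdout.buffer)
#     stats.freeze()                    # now the stats can be pickled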
@@ -1,2520 +1,2520 @@
# merge.py - directory-level update/merge handling for Mercurial
#
# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


import collections
import struct
import typing

from .i18n import _
from .node import nullrev
from .thirdparty import attr

# Force pytype to use the non-vendored package
if typing.TYPE_CHECKING:
    # noinspection PyPackageRequirements
    import attr

from .utils import stringutil
from .dirstateutils import timestamp
from . import (
    copies,
    encoding,
    error,
    filemerge,
    match as matchmod,
    mergestate as mergestatemod,
    obsutil,
    pathutil,
    policy,
    pycompat,
    scmutil,
    subrepoutil,
    util,
    worker,
)

_pack = struct.pack
_unpack = struct.unpack


def _getcheckunknownconfig(repo, section, name):
    config = repo.ui.config(section, name)
    valid = [b'abort', b'ignore', b'warn']
    if config not in valid:
        validstr = b', '.join([b"'" + v + b"'" for v in valid])
        msg = _(b"%s.%s not valid ('%s' is none of %s)")
        msg %= (section, name, config, validstr)
        raise error.ConfigError(msg)
    return config

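# For orientation, a hypothetical hgrc fragment exercising the two settings
# this helper validates (the defaults are not shown in this hunk):
#
#     [merge]
#     checkunknown = warn     # one of: abort, ignore, warn
#     checkignored = ignore   # same choices, applied to ignored files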
def _checkunknownfile(dirstate, wvfs, dircache, wctx, mctx, f, f2=None):
    if wctx.isinmemory():
        # Nothing to do in IMM because nothing in the "working copy" can be an
        # unknown file.
        #
        # Note that we should bail out here, not in ``_checkunknownfiles()``,
        # because that function does other useful work.
        return False

    if f2 is None:
        f2 = f
    return (
        wvfs.isfileorlink_checkdir(dircache, f)
        and dirstate.normalize(f) not in dirstate
        and mctx[f2].cmp(wctx[f])
    )


class _unknowndirschecker:
    """
    Look for any unknown files or directories that may have a path conflict
    with a file.  If any path prefix of the file exists as a file or link,
    then it conflicts.  If the file itself is a directory that contains any
    file that is not tracked, then it conflicts.

    Returns the shortest path at which a conflict occurs, or None if there is
    no conflict.
    """

    def __init__(self):
        # A set of paths known to be good. This prevents repeated checking of
        # dirs. It will be updated with any new dirs that are checked and found
        # to be safe.
        self._unknowndircache = set()

        # A set of paths that are known to be absent. This prevents repeated
        # checking of subdirectories that are known not to exist. It will be
        # updated with any new dirs that are checked and found to be absent.
        self._missingdircache = set()

    def __call__(self, repo, wctx, f):
        if wctx.isinmemory():
            # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
            return False

        # Check for path prefixes that exist as unknown files.
        for p in reversed(list(pathutil.finddirs(f))):
            if p in self._missingdircache:
                return
            if p in self._unknowndircache:
                continue
            if repo.wvfs.audit.check(p):
                if (
                    repo.wvfs.isfileorlink(p)
                    and repo.dirstate.normalize(p) not in repo.dirstate
                ):
                    return p
                if not repo.wvfs.lexists(p):
                    self._missingdircache.add(p)
                    return
                self._unknowndircache.add(p)

        # Check if the file conflicts with a directory containing unknown files.
        if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
            # Does the directory contain any files that are not in the dirstate?
            for p, dirs, files in repo.wvfs.walk(f):
                for fn in files:
                    relf = util.pconvert(repo.wvfs.reljoin(p, fn))
                    relf = repo.dirstate.normalize(relf, isknown=True)
                    if relf not in repo.dirstate:
                        return f
        return None

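# Concretely, the two conflict shapes the checker above reports (paths are
# made up):
#
#     wants to create b'a/b/c', but b'a/b' exists on disk as an untracked
#     file       -> returns the shortest conflicting prefix, b'a/b'
#     wants to create b'a', but b'a' is a directory holding an untracked
#     file       -> returns b'a' itself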
def _checkunknownfiles(repo, wctx, mctx, force, mresult, mergeforce):
    """
    Considers any actions that care about the presence of conflicting unknown
    files. For some actions, the result is to abort; for others, it is to
    choose a different action.
    """
    fileconflicts = set()
    pathconflicts = set()
    warnconflicts = set()
    abortconflicts = set()
    unknownconfig = _getcheckunknownconfig(repo, b'merge', b'checkunknown')
    ignoredconfig = _getcheckunknownconfig(repo, b'merge', b'checkignored')
    pathconfig = repo.ui.configbool(
        b'experimental', b'merge.checkpathconflicts'
    )
    dircache = dict()
    dirstate = repo.dirstate
    wvfs = repo.wvfs
    if not force:

        def collectconflicts(conflicts, config):
            if config == b'abort':
                abortconflicts.update(conflicts)
            elif config == b'warn':
                warnconflicts.update(conflicts)

        checkunknowndirs = _unknowndirschecker()
        for f in mresult.files(
            (
                mergestatemod.ACTION_CREATED,
                mergestatemod.ACTION_DELETED_CHANGED,
            )
        ):
            if _checkunknownfile(dirstate, wvfs, dircache, wctx, mctx, f):
                fileconflicts.add(f)
            elif pathconfig and f not in wctx:
                path = checkunknowndirs(repo, wctx, f)
                if path is not None:
                    pathconflicts.add(path)
        for f, args, msg in mresult.getactions(
            [mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]
        ):
            if _checkunknownfile(
                dirstate, wvfs, dircache, wctx, mctx, f, args[0]
            ):
                fileconflicts.add(f)

        allconflicts = fileconflicts | pathconflicts
        ignoredconflicts = {c for c in allconflicts if repo.dirstate._ignore(c)}
        unknownconflicts = allconflicts - ignoredconflicts
        collectconflicts(ignoredconflicts, ignoredconfig)
        collectconflicts(unknownconflicts, unknownconfig)
    else:
        for f, args, msg in list(
            mresult.getactions([mergestatemod.ACTION_CREATED_MERGE])
        ):
            fl2, anc = args
            different = _checkunknownfile(
                dirstate, wvfs, dircache, wctx, mctx, f
            )
            if repo.dirstate._ignore(f):
                config = ignoredconfig
            else:
                config = unknownconfig

            # The behavior when force is True is described by this table:
            #  config  different  mergeforce  |    action    backup
            #    *         n          *       |      get        n
            #    *         y          y       |     merge       -
            #   abort      y          n       |     merge       -   (1)
            #   warn       y          n       |  warn + get     y
            #  ignore      y          n       |      get        y
            #
            # (1) this is probably the wrong behavior here -- we should
            #     probably abort, but some actions like rebases currently
            #     don't like an abort happening in the middle of
            #     merge.update.
            if not different:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_GET,
                    (fl2, False),
                    b'remote created',
                )
            elif mergeforce or config == b'abort':
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, f, None, False, anc),
                    b'remote differs from untracked local',
                )
            elif config == b'abort':
                abortconflicts.add(f)
            else:
                if config == b'warn':
                    warnconflicts.add(f)
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_GET,
                    (fl2, True),
                    b'remote created',
                )

    for f in sorted(abortconflicts):
        warn = repo.ui.warn
        if f in pathconflicts:
            if repo.wvfs.isfileorlink(f):
                warn(_(b"%s: untracked file conflicts with directory\n") % f)
            else:
                warn(_(b"%s: untracked directory conflicts with file\n") % f)
        else:
            warn(_(b"%s: untracked file differs\n") % f)
    if abortconflicts:
        raise error.StateError(
            _(
                b"untracked files in working directory "
                b"differ from files in requested revision"
            )
        )

    for f in sorted(warnconflicts):
        if repo.wvfs.isfileorlink(f):
            repo.ui.warn(_(b"%s: replacing untracked file\n") % f)
        else:
            repo.ui.warn(_(b"%s: replacing untracked files in directory\n") % f)

    def transformargs(f, args):
        backup = (
            f in fileconflicts
            or pathconflicts
            and (
                f in pathconflicts
                or any(p in pathconflicts for p in pathutil.finddirs(f))
            )
        )
        (flags,) = args
        return (flags, backup)

    mresult.mapaction(
        mergestatemod.ACTION_CREATED, mergestatemod.ACTION_GET, transformargs
    )


def _forgetremoved(wctx, mctx, branchmerge, mresult):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.
    """

    m = mergestatemod.ACTION_FORGET
    if branchmerge:
        m = mergestatemod.ACTION_REMOVE
    for f in wctx.deleted():
        if f not in mctx:
            mresult.addfile(f, m, None, b"forget deleted")

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_FORGET,
                    None,
                    b"forget removed",
                )


def _checkcollision(repo, wmf, mresult):
    """
    Check for case-folding collisions.
    """
    # If the repo is narrowed, filter out files outside the narrowspec.
    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        pmmf = set(wmf.walk(narrowmatch))
        if mresult:
            for f in list(mresult.files()):
                if not narrowmatch(f):
                    mresult.removefile(f)
    else:
        # build provisional merged manifest up
        pmmf = set(wmf)

    if mresult:
        # KEEP and EXEC are no-op
        for f in mresult.files(
            (
                mergestatemod.ACTION_ADD,
                mergestatemod.ACTION_ADD_MODIFIED,
                mergestatemod.ACTION_FORGET,
                mergestatemod.ACTION_GET,
                mergestatemod.ACTION_CHANGED_DELETED,
                mergestatemod.ACTION_DELETED_CHANGED,
            )
        ):
            pmmf.add(f)
        for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
            pmmf.discard(f)
        for f, args, msg in mresult.getactions(
            [mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]
        ):
            f2, flags = args
            pmmf.discard(f2)
            pmmf.add(f)
        for f in mresult.files((mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,)):
            pmmf.add(f)
        for f, args, msg in mresult.getactions([mergestatemod.ACTION_MERGE]):
            f1, f2, fa, move, anc = args
            if move:
                pmmf.discard(f1)
            pmmf.add(f)

    # check case-folding collision in provisional merged manifest
    foldmap = {}
    for f in pmmf:
        fold = util.normcase(f)
        if fold in foldmap:
            msg = _(b"case-folding collision between %s and %s")
            msg %= (f, foldmap[fold])
            raise error.StateError(msg)
        foldmap[fold] = f

    # check case-folding of directories
    foldprefix = unfoldprefix = lastfull = b''
    for fold, f in sorted(foldmap.items()):
        if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
            # the folded prefix matches but actual casing is different
            msg = _(b"case-folding collision between %s and directory of %s")
            msg %= (lastfull, f)
            raise error.StateError(msg)
        foldprefix = fold + b'/'
        unfoldprefix = f + b'/'
        lastfull = f

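# A tiny illustration of the first collision check above (names are
# hypothetical): on a case-insensitive filesystem util.normcase(b'README')
# and util.normcase(b'readme') fold to the same key, so a provisional
# manifest containing both files aborts with:
#
#     case-folding collision between readme and README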
def _filesindirs(repo, manifest, dirs):
    """
    Generator that yields pairs of all the files in the manifest that are found
    inside the directories listed in dirs, and which directory they are found
    in.
    """
    for f in manifest:
        for p in pathutil.finddirs(f):
            if p in dirs:
                yield f, p
                break


def checkpathconflicts(repo, wctx, mctx, mresult):
    """
    Check if any actions introduce path conflicts in the repository, updating
    actions to record or handle the path conflict accordingly.
    """
    mf = wctx.manifest()

    # The set of local files that conflict with a remote directory.
    localconflicts = set()

    # The set of directories that conflict with a remote file, and so may cause
    # conflicts if they still contain any files after the merge.
    remoteconflicts = set()

    # The set of directories that appear as both a file and a directory in the
    # remote manifest.  These indicate an invalid remote manifest, which
    # can't be updated to cleanly.
    invalidconflicts = set()

    # The set of directories that contain files that are being created.
    createdfiledirs = set()

    # The set of files deleted by all the actions.
    deletedfiles = set()

    for f in mresult.files(
        (
            mergestatemod.ACTION_CREATED,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_MERGE,
            mergestatemod.ACTION_CREATED_MERGE,
        )
    ):
        # This action may create a new local file.
        createdfiledirs.update(pathutil.finddirs(f))
        if mf.hasdir(f):
            # The file aliases a local directory.  This might be ok if all
            # the files in the local directory are being deleted.  This
            # will be checked once we know what all the deleted files are.
            remoteconflicts.add(f)
    # Track the names of all deleted files.
    for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
        deletedfiles.add(f)
    for f, args, msg in mresult.getactions((mergestatemod.ACTION_MERGE,)):
        f1, f2, fa, move, anc = args
        if move:
            deletedfiles.add(f1)
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,)
    ):
        f2, flags = args
        deletedfiles.add(f2)

    # Check all directories that contain created files for path conflicts.
    for p in createdfiledirs:
        if p in mf:
            if p in mctx:
                # A file is in a directory which aliases both a local
                # and a remote file.  This is an internal inconsistency
                # within the remote manifest.
                invalidconflicts.add(p)
            else:
                # A file is in a directory which aliases a local file.
                # We will need to rename the local file.
                localconflicts.add(p)
        pd = mresult.getfile(p)
        if pd and pd[0] in (
            mergestatemod.ACTION_CREATED,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_MERGE,
            mergestatemod.ACTION_CREATED_MERGE,
        ):
            # The file is in a directory which aliases a remote file.
            # This is an internal inconsistency within the remote
            # manifest.
            invalidconflicts.add(p)

    # Rename all local conflicting files that have not been deleted.
    for p in localconflicts:
        if p not in deletedfiles:
            ctxname = bytes(wctx).rstrip(b'+')
            pnew = util.safename(p, ctxname, wctx, set(mresult.files()))
            porig = wctx[p].copysource() or p
            mresult.addfile(
                pnew,
                mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
                (p, porig),
                b'local path conflict',
            )
            mresult.addfile(
                p,
                mergestatemod.ACTION_PATH_CONFLICT,
                (pnew, b'l'),
                b'path conflict',
            )

    if remoteconflicts:
        # Check if all files in the conflicting directories have been removed.
        ctxname = bytes(mctx).rstrip(b'+')
        for f, p in _filesindirs(repo, mf, remoteconflicts):
            if f not in deletedfiles:
                m, args, msg = mresult.getfile(p)
                pnew = util.safename(p, ctxname, wctx, set(mresult.files()))
                if m in (
                    mergestatemod.ACTION_DELETED_CHANGED,
                    mergestatemod.ACTION_MERGE,
                ):
                    # Action was merge, just update target.
                    mresult.addfile(pnew, m, args, msg)
                else:
                    # Action was create, change to renamed get action.
                    fl = args[0]
                    mresult.addfile(
                        pnew,
                        mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
                        (p, fl),
                        b'remote path conflict',
                    )
                mresult.addfile(
                    p,
                    mergestatemod.ACTION_PATH_CONFLICT,
                    (pnew, b'r'),
                    b'path conflict',
                )
                remoteconflicts.remove(p)
                break

    if invalidconflicts:
        for p in invalidconflicts:
            repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
        raise error.StateError(
            _(b"destination manifest contains path conflicts")
        )

def _filternarrowactions(narrowmatch, branchmerge, mresult):
    """
    Filters out actions that can be ignored because the repo is narrowed.

    Raise an exception if the merge cannot be completed because the repo is
    narrowed.
    """
    # We mutate the items in the dict during iteration, so iterate
    # over a copy.
-    for f, action in mresult.filemap():
+    for f, action in list(mresult.filemap()):
        if narrowmatch(f):
            pass
        elif not branchmerge:
            mresult.removefile(f)  # just updating, ignore changes outside clone
        elif action[0].no_op:
            mresult.removefile(f)  # merge does not affect file
        elif action[0].narrow_safe:
            if not f.endswith(b'/'):
                mresult.removefile(f)  # merge won't affect on-disk files

                mresult.addcommitinfo(
                    f, b'outside-narrow-merge-action', action[0].changes
                )
            else:  # TODO: handle the tree case
                msg = _(
                    b'merge affects file \'%s\' outside narrow, '
                    b'which is not yet supported'
                )
                hint = _(b'merging in the other direction may work')
                raise error.Abort(msg % f, hint=hint)
        else:
            msg = _(b'conflict in file \'%s\' is outside narrow clone')
            raise error.StateError(msg % f)

class mergeresult:
    """An object representing the result of merging manifests.

    It has information about what actions need to be performed on the
    dirstate, the mapping of divergent renames, and other such cases."""

    def __init__(self):
        """
        filemapping: dict of filename as keys and action related info as values
        diverge: mapping of source name -> list of dest name for
                 divergent renames
        renamedelete: mapping of source name -> list of destinations for files
                      deleted on one side and renamed on other.
        commitinfo: dict containing data which should be used on commit
                    contains a filename -> info mapping
        actionmapping: dict of action names as keys and values are dict of
                       filename as key and related data as values
        """
        self._filemapping = {}
        self._diverge = {}
        self._renamedelete = {}
        self._commitinfo = collections.defaultdict(dict)
        self._actionmapping = collections.defaultdict(dict)

    def updatevalues(self, diverge, renamedelete):
        self._diverge = diverge
        self._renamedelete = renamedelete

    def addfile(self, filename, action, data, message):
        """adds a new file to the mergeresult object

        filename: file which we are adding
        action: one of mergestatemod.ACTION_*
        data: a tuple of information like fctx and ctx related to this merge
        message: a message about the merge
        """
        # if the file already existed, we need to delete its old
        # entry from _actionmapping too
        if filename in self._filemapping:
            a, d, m = self._filemapping[filename]
            del self._actionmapping[a][filename]

        self._filemapping[filename] = (action, data, message)
        self._actionmapping[action][filename] = (data, message)

    def mapaction(self, actionfrom, actionto, transform):
        """changes all occurrences of action `actionfrom` into `actionto`,
        transforming its args with the function `transform`.
        """
        orig = self._actionmapping[actionfrom]
        del self._actionmapping[actionfrom]
        dest = self._actionmapping[actionto]
        for f, (data, msg) in orig.items():
            data = transform(f, data)
            self._filemapping[f] = (actionto, data, msg)
            dest[f] = (data, msg)

    def getfile(self, filename, default_return=None):
        """returns (action, args, msg) about this file

        returns default_return if the file is not present"""
        if filename in self._filemapping:
            return self._filemapping[filename]
        return default_return

    def files(self, actions=None):
        """returns files on which the provided actions need to be performed

        If actions is None, all files are returned
        """
        # TODO: think whether we should return renamedelete and
        # diverge filenames also
        if actions is None:
            for f in self._filemapping:
                yield f

        else:
            for a in actions:
                for f in self._actionmapping[a]:
                    yield f

    def removefile(self, filename):
        """removes a file from the mergeresult object because the file might
        not be merged anymore"""
        action, data, message = self._filemapping[filename]
        del self._filemapping[filename]
        del self._actionmapping[action][filename]

    def getactions(self, actions, sort=False):
        """get list of files which are marked with these actions
        if sort is true, the files for each action are sorted and then added

        Yields tuples of the form (filename, data, message)
        """
        for a in actions:
            if sort:
                for f in sorted(self._actionmapping[a]):
                    args, msg = self._actionmapping[a][f]
                    yield f, args, msg
            else:
                for f, (args, msg) in self._actionmapping[a].items():
                    yield f, args, msg

    def len(self, actions=None):
        """returns the number of files which need actions

        if actions is passed, only the total number of files for those
        actions is returned"""

        if actions is None:
            return len(self._filemapping)

        return sum(len(self._actionmapping[a]) for a in actions)

    def filemap(self, sort=False):
-        if sorted:
+        if sort:
            for key, val in sorted(self._filemapping.items()):
                yield key, val
        else:
            for key, val in self._filemapping.items():
                yield key, val

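    # Note on the change just above: the old ``if sorted:`` tested the
    # truthiness of the *builtin* ``sorted`` function, which is always true,
    # so filemap() sorted its output unconditionally; testing the ``sort``
    # parameter restores the intended unsorted fast path.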
    def addcommitinfo(self, filename, key, value):
        """adds key-value information about filename which will be required
        while committing this merge"""
        self._commitinfo[filename][key] = value

    @property
    def diverge(self):
        return self._diverge

    @property
    def renamedelete(self):
        return self._renamedelete

    @property
    def commitinfo(self):
        return self._commitinfo

    @property
    def actionsdict(self):
        """returns a dictionary of actions to be performed with action as key
        and a list of files and related arguments as values"""
        res = collections.defaultdict(list)
        for a, d in self._actionmapping.items():
            for f, (args, msg) in d.items():
                res[a].append((f, args, msg))
        return res

    def setactions(self, actions):
        self._filemapping = actions
        self._actionmapping = collections.defaultdict(dict)
        for f, (act, data, msg) in self._filemapping.items():
            self._actionmapping[act][f] = data, msg

    def hasconflicts(self):
        """tells whether this merge resulted in some actions which can
        result in conflicts"""
        for a in self._actionmapping.keys():
            if (
                a
                not in (
                    mergestatemod.ACTION_GET,
                    mergestatemod.ACTION_EXEC,
                    mergestatemod.ACTION_REMOVE,
                    mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
                )
                and self._actionmapping[a]
                and not a.no_op
            ):
                return True

        return False

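# A minimal usage sketch of the mergeresult API above (the file name and flag
# values are invented; in real use manifestmerge() below populates the
# object):
#
#     mresult = mergeresult()
#     mresult.addfile(
#         b'a.txt', mergestatemod.ACTION_GET, (b'', False), b'remote is newer'
#     )
#     mresult.addcommitinfo(b'a.txt', b'filenode-source', b'other')
#     for f, args, msg in mresult.getactions(
#         [mergestatemod.ACTION_GET], sort=True
#     ):
#         pass  # args == (flags, backup) for a 'get' action
#     assert not mresult.hasconflicts()  # ACTION_GET is in the no-conflict set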
732 def manifestmerge(
732 def manifestmerge(
733 repo,
733 repo,
734 wctx,
734 wctx,
735 p2,
735 p2,
736 pa,
736 pa,
737 branchmerge,
737 branchmerge,
738 force,
738 force,
739 matcher,
739 matcher,
740 acceptremote,
740 acceptremote,
741 followcopies,
741 followcopies,
742 forcefulldiff=False,
742 forcefulldiff=False,
743 ):
743 ):
744 """
744 """
745 Merge wctx and p2 with ancestor pa and generate merge action list
745 Merge wctx and p2 with ancestor pa and generate merge action list
746
746
747 branchmerge and force are as passed in to update
747 branchmerge and force are as passed in to update
748 matcher = matcher to filter file lists
748 matcher = matcher to filter file lists
749 acceptremote = accept the incoming changes without prompting
749 acceptremote = accept the incoming changes without prompting
750
750
751 Returns an object of mergeresult class
751 Returns an object of mergeresult class
752 """
752 """
753 mresult = mergeresult()
753 mresult = mergeresult()
754 if matcher is not None and matcher.always():
754 if matcher is not None and matcher.always():
755 matcher = None
755 matcher = None
756
756
757 # manifests fetched in order are going to be faster, so prime the caches
757 # manifests fetched in order are going to be faster, so prime the caches
758 [
758 [
759 x.manifest()
759 x.manifest()
760 for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
760 for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
761 ]
761 ]
762
762
763 branch_copies1 = copies.branch_copies()
763 branch_copies1 = copies.branch_copies()
764 branch_copies2 = copies.branch_copies()
764 branch_copies2 = copies.branch_copies()
765 diverge = {}
765 diverge = {}
766 # information from merge which is needed at commit time
766 # information from merge which is needed at commit time
767 # for example choosing filelog of which parent to commit
767 # for example choosing filelog of which parent to commit
768 # TODO: use specific constants in future for this mapping
768 # TODO: use specific constants in future for this mapping
769 if followcopies:
769 if followcopies:
770 branch_copies1, branch_copies2, diverge = copies.mergecopies(
770 branch_copies1, branch_copies2, diverge = copies.mergecopies(
771 repo, wctx, p2, pa
771 repo, wctx, p2, pa
772 )
772 )
773
773
774 boolbm = pycompat.bytestr(bool(branchmerge))
774 boolbm = pycompat.bytestr(bool(branchmerge))
775 boolf = pycompat.bytestr(bool(force))
775 boolf = pycompat.bytestr(bool(force))
776 boolm = pycompat.bytestr(bool(matcher))
776 boolm = pycompat.bytestr(bool(matcher))
777 repo.ui.note(_(b"resolving manifests\n"))
777 repo.ui.note(_(b"resolving manifests\n"))
778 repo.ui.debug(
778 repo.ui.debug(
779 b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
779 b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
780 )
780 )
781 repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
781 repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
782
782
783 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
783 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
784 copied1 = set(branch_copies1.copy.values())
784 copied1 = set(branch_copies1.copy.values())
785 copied1.update(branch_copies1.movewithdir.values())
785 copied1.update(branch_copies1.movewithdir.values())
786 copied2 = set(branch_copies2.copy.values())
786 copied2 = set(branch_copies2.copy.values())
787 copied2.update(branch_copies2.movewithdir.values())
787 copied2.update(branch_copies2.movewithdir.values())
788
788
789 if b'.hgsubstate' in m1 and wctx.rev() is None:
789 if b'.hgsubstate' in m1 and wctx.rev() is None:
790 # Check whether sub state is modified, and overwrite the manifest
790 # Check whether sub state is modified, and overwrite the manifest
791 # to flag the change. If wctx is a committed revision, we shouldn't
791 # to flag the change. If wctx is a committed revision, we shouldn't
792 # care for the dirty state of the working directory.
792 # care for the dirty state of the working directory.
793 if any(wctx.sub(s).dirty() for s in wctx.substate):
793 if any(wctx.sub(s).dirty() for s in wctx.substate):
794 m1[b'.hgsubstate'] = repo.nodeconstants.modifiednodeid
794 m1[b'.hgsubstate'] = repo.nodeconstants.modifiednodeid
795
795
796 # Don't use m2-vs-ma optimization if:
796 # Don't use m2-vs-ma optimization if:
797 # - ma is the same as m1 or m2, which we're just going to diff again later
797 # - ma is the same as m1 or m2, which we're just going to diff again later
798 # - The caller specifically asks for a full diff, which is useful during bid
798 # - The caller specifically asks for a full diff, which is useful during bid
799 # merge.
799 # merge.
800 # - we are tracking salvaged files specifically hence should process all
800 # - we are tracking salvaged files specifically hence should process all
801 # files
801 # files
802 if (
802 if (
803 pa not in ([wctx, p2] + wctx.parents())
803 pa not in ([wctx, p2] + wctx.parents())
804 and not forcefulldiff
804 and not forcefulldiff
805 and not (
805 and not (
806 repo.ui.configbool(b'experimental', b'merge-track-salvaged')
806 repo.ui.configbool(b'experimental', b'merge-track-salvaged')
807 or repo.filecopiesmode == b'changeset-sidedata'
807 or repo.filecopiesmode == b'changeset-sidedata'
808 )
808 )
809 ):
809 ):
810 # Identify which files are relevant to the merge, so we can limit the
810 # Identify which files are relevant to the merge, so we can limit the
811 # total m1-vs-m2 diff to just those files. This has significant
811 # total m1-vs-m2 diff to just those files. This has significant
812 # performance benefits in large repositories.
812 # performance benefits in large repositories.
813 relevantfiles = set(ma.diff(m2).keys())
813 relevantfiles = set(ma.diff(m2).keys())
814
814
815 # For copied and moved files, we need to add the source file too.
815 # For copied and moved files, we need to add the source file too.
816 for copykey, copyvalue in branch_copies1.copy.items():
816 for copykey, copyvalue in branch_copies1.copy.items():
817 if copyvalue in relevantfiles:
817 if copyvalue in relevantfiles:
818 relevantfiles.add(copykey)
818 relevantfiles.add(copykey)
819 for movedirkey in branch_copies1.movewithdir:
819 for movedirkey in branch_copies1.movewithdir:
820 relevantfiles.add(movedirkey)
820 relevantfiles.add(movedirkey)
821 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
821 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
822 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
822 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
823
823
824 diff = m1.diff(m2, match=matcher)
824 diff = m1.diff(m2, match=matcher)
825
825
    for f, ((n1, fl1), (n2, fl2)) in diff.items():
        if n1 and n2:  # file exists on both local and remote side
            if f not in ma:
                # TODO: what if they're renamed from different sources?
                fa = branch_copies1.copy.get(
                    f, None
                ) or branch_copies2.copy.get(f, None)
                args, msg = None, None
                if fa is not None:
                    args = (f, f, fa, False, pa.node())
                    msg = b'both renamed from %s' % fa
                else:
                    args = (f, f, None, False, pa.node())
                    msg = b'both created'
                mresult.addfile(f, mergestatemod.ACTION_MERGE, args, msg)
            elif f in branch_copies1.copy:
                fa = branch_copies1.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, fa, fa, False, pa.node()),
                    b'local replaced from %s' % fa,
                )
            elif f in branch_copies2.copy:
                fa = branch_copies2.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (fa, f, fa, False, pa.node()),
                    b'other replaced from %s' % fa,
                )
            else:
                a = ma[f]
                fla = ma.flags(f)
                nol = b'l' not in fl1 + fl2 + fla
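                # nol: none of the three versions is a symlink, so it is safe
                # to reason about the executable bit separately from content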
                if n2 == a and fl2 == fla:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_KEEP,
                        (),
                        b'remote unchanged',
                    )
                elif n1 == a and fl1 == fla:  # local unchanged - use remote
                    if n1 == n2:  # optimization: keep local content
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_EXEC,
                            (fl2,),
                            b'update permissions',
                        )
                    else:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_GET,
                            (fl2, False),
                            b'remote is newer',
                        )
                        if branchmerge:
                            mresult.addcommitinfo(
                                f, b'filenode-source', b'other'
                            )
                elif nol and n2 == a:  # remote only changed 'x'
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_EXEC,
                        (fl2,),
                        b'update permissions',
                    )
                elif nol and n1 == a:  # local only changed 'x'
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_GET,
                        (fl1, False),
                        b'remote is newer',
                    )
                    if branchmerge:
                        mresult.addcommitinfo(f, b'filenode-source', b'other')
                else:  # both changed something
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_MERGE,
                        (f, f, f, False, pa.node()),
                        b'versions differ',
                    )
        elif n1:  # file exists only on local side
            if f in copied2:
                pass  # we'll deal with it on m2 side
            elif (
                f in branch_copies1.movewithdir
            ):  # directory rename, move local
                f2 = branch_copies1.movewithdir[f]
                if f2 in m2:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_MERGE,
                        (f, f2, None, True, pa.node()),
                        b'remote directory rename, both created',
                    )
                else:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
                        (f, fl1),
                        b'remote directory rename - move from %s' % f,
                    )
            elif f in branch_copies1.copy:
                f2 = branch_copies1.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, f2, f2, False, pa.node()),
                    b'local copied/moved from %s' % f2,
                )
            elif f in ma:  # clean locally, changed in ancestor, no remote
                if n1 != ma[f]:
                    if acceptremote:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_REMOVE,
                            None,
                            b'remote delete',
                        )
                    else:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_CHANGED_DELETED,
                            (f, None, f, False, pa.node()),
                            b'prompt changed/deleted',
                        )
                        if branchmerge:
                            mresult.addcommitinfo(
                                f, b'merge-removal-candidate', b'yes'
                            )
                elif n1 == repo.nodeconstants.addednodeid:
                    # This file was locally added. We should forget it instead
                    # of deleting it.
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_FORGET,
                        None,
                        b'remote deleted',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_REMOVE,
                        None,
                        b'other deleted',
                    )
                    if branchmerge:
                        # the file must be absent after merging;
                        # however, the user might make
                        # the file reappear using revert, and if they do,
                        # we force-create a new node
                        mresult.addcommitinfo(
                            f, b'merge-removal-candidate', b'yes'
                        )

            else:  # file not in ancestor, not in remote
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_KEEP_NEW,
                    None,
                    b'ancestor missing, remote missing',
                )

        elif n2:  # file exists only on remote side
            if f in copied1:
                pass  # we'll deal with it on m1 side
            elif f in branch_copies2.movewithdir:
                f2 = branch_copies2.movewithdir[f]
                if f2 in m1:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_MERGE,
                        (f2, f, None, False, pa.node()),
                        b'local directory rename, both created',
                    )
                else:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
                        (f, fl2),
                        b'local directory rename - get from %s' % f,
                    )
            elif f in branch_copies2.copy:
                f2 = branch_copies2.copy[f]
                msg, args = None, None
                if f2 in m2:
                    args = (f2, f, f2, False, pa.node())
                    msg = b'remote copied from %s' % f2
                else:
                    args = (f2, f, f2, True, pa.node())
                    msg = b'remote moved from %s' % f2
                mresult.addfile(f, mergestatemod.ACTION_MERGE, args, msg)
            elif f not in ma:
                # local unknown, remote created: the logic is described by the
                # following table:
                #
                # force  branchmerge  different  |  action
                #   n         *           *      |  create
                #   y         n           *      |  create
                #   y         y           n      |  create
                #   y         y           y      |  merge
                #
                # Checking whether the files are different is expensive, so we
                # don't do that when we can avoid it.
                if not force:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote created',
                    )
                elif not branchmerge:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote created',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED_MERGE,
                        (fl2, pa.node()),
                        b'remote created, get or merge',
                    )
            elif n2 != ma[f]:
                df = None
                for d in branch_copies1.dirmove:
                    if f.startswith(d):
                        # new file added in a directory that was moved
                        df = branch_copies1.dirmove[d] + f[len(d) :]
                        break
                if df is not None and df in m1:
                    mresult.addfile(
                        df,
                        mergestatemod.ACTION_MERGE,
                        (df, f, f, False, pa.node()),
                        b'local directory rename - respect move '
                        b'from %s' % f,
                    )
                elif acceptremote:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote recreating',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_DELETED_CHANGED,
                        (None, f, f, False, pa.node()),
                        b'prompt deleted/changed',
                    )
                    if branchmerge:
                        mresult.addcommitinfo(
                            f, b'merge-removal-candidate', b'yes'
                        )
            else:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_KEEP_ABSENT,
                    None,
                    b'local not present, remote unchanged',
                )
                if branchmerge:
                    # the file must be absent after merging;
                    # however, the user might make
                    # the file reappear using revert, and if they do,
                    # we force-create a new node
                    mresult.addcommitinfo(f, b'merge-removal-candidate', b'yes')

    if repo.ui.configbool(b'experimental', b'merge.checkpathconflicts'):
        # If we are merging, look for path conflicts.
        checkpathconflicts(repo, wctx, p2, mresult)

    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        # Updates "actions" in place
        _filternarrowactions(narrowmatch, branchmerge, mresult)

    renamedelete = branch_copies1.renamedelete
    renamedelete.update(branch_copies2.renamedelete)

    mresult.updatevalues(diverge, renamedelete)
    return mresult


def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
    """Resolves false conflicts where the nodeid changed but the content
    remained the same."""
    # We force a copy of actions.items() because we're going to mutate
    # actions as we resolve trivial conflicts.
    for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
        if f in ancestor and not wctx[f].cmp(ancestor[f]):
            # local did change but ended up with same content
            mresult.addfile(
                f, mergestatemod.ACTION_REMOVE, None, b'prompt same'
            )

    for f in list(mresult.files((mergestatemod.ACTION_DELETED_CHANGED,))):
        if f in ancestor and not mctx[f].cmp(ancestor[f]):
            # remote did change but ended up with same content
            mresult.removefile(f)  # don't get = keep local deleted


def calculateupdates(
    repo,
    wctx,
    mctx,
    ancestors,
    branchmerge,
    force,
    acceptremote,
    followcopies,
    matcher=None,
    mergeforce=False,
):
    """
    Calculate the actions needed to merge mctx into wctx using ancestors

    Uses manifestmerge() to merge the manifests and get the list of actions
    required for merging two manifests. If there are multiple ancestors, uses
    bid merge if enabled.

    Also filters out actions which are unrequired if the repository is sparse.

    Returns a mergeresult object, the same as manifestmerge().
    """
    # Avoid cycle.
    from . import sparse

    mresult = None
    if len(ancestors) == 1:  # default
        mresult = manifestmerge(
            repo,
            wctx,
            mctx,
            ancestors[0],
            branchmerge,
            force,
            matcher,
            acceptremote,
            followcopies,
        )
        _checkunknownfiles(repo, wctx, mctx, force, mresult, mergeforce)
        if repo.ui.configbool(b'devel', b'debug.abort-update'):
            exit(1)

    else:  # only when merge.preferancestor=* - the default
        repo.ui.note(
            _(b"note: merging %s and %s using bids from ancestors %s\n")
            % (
                wctx,
                mctx,
                _(b' and ').join(pycompat.bytestr(anc) for anc in ancestors),
            )
        )

        # mapping filename to bids (action method to list of actions)
        # {FILENAME1 : BID1, FILENAME2 : BID2}
        # BID is another dictionary which contains
        # a mapping of the following form:
        # {ACTION_X : [info, ..], ACTION_Y : [info, ..]}
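        # For example (hypothetical shape), after two ancestors bid on
        # b'a.txt' with different actions, fbids could look like:
        #   {b'a.txt': {ACTION_GET: [(ACTION_GET, args1, b'remote is newer')],
        #               ACTION_KEEP: [(ACTION_KEEP, (), b'remote unchanged')]}}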
        fbids = {}
        mresult = mergeresult()
        diverge, renamedelete = None, None
        for ancestor in ancestors:
            repo.ui.note(_(b'\ncalculating bids for ancestor %s\n') % ancestor)
            mresult1 = manifestmerge(
                repo,
                wctx,
                mctx,
                ancestor,
                branchmerge,
                force,
                matcher,
                acceptremote,
                followcopies,
                forcefulldiff=True,
            )
            _checkunknownfiles(repo, wctx, mctx, force, mresult1, mergeforce)

            # Track the shortest set of warnings on the theory that bid
            # merge will correctly incorporate more information
            if diverge is None or len(mresult1.diverge) < len(diverge):
                diverge = mresult1.diverge
            if renamedelete is None or len(renamedelete) < len(
                mresult1.renamedelete
            ):
                renamedelete = mresult1.renamedelete

            # blindly update final mergeresult commitinfo with what we get
            # from the mergeresult object for each ancestor
            # TODO: some commitinfo depends on what bid merge chooses, and
            # hence we will need to make commitinfo also depend on bid merge
            # logic
            mresult._commitinfo.update(mresult1._commitinfo)

            for f, a in mresult1.filemap(sort=True):
                m, args, msg = a
                repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m.__bytes__()))
                if f in fbids:
                    d = fbids[f]
                    if m in d:
                        d[m].append(a)
                    else:
                        d[m] = [a]
                else:
                    fbids[f] = {m: [a]}

        # Call for bids
        # Pick the best bid for each file
        repo.ui.note(
            _(b'\nauction for merging merge bids (%d ancestors)\n')
            % len(ancestors)
        )
        for f, bids in sorted(fbids.items()):
            if repo.ui.debugflag:
                repo.ui.debug(b" list of bids for %s:\n" % f)
                for m, l in sorted(bids.items()):
                    for _f, args, msg in l:
                        repo.ui.debug(b' %s -> %s\n' % (msg, m.__bytes__()))
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1:  # all bids are the same kind of method
                m, l = list(bids.items())[0]
                if all(a == l[0] for a in l[1:]):  # len(bids) is > 1
                    repo.ui.note(
                        _(b" %s: consensus for %s\n") % (f, m.__bytes__())
                    )
                    mresult.addfile(f, *l[0])
                    continue
            # If keep is an option, just do it.
            if mergestatemod.ACTION_KEEP in bids:
                repo.ui.note(_(b" %s: picking 'keep' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP][0])
                continue
            # If keep absent is an option, just do that
            if mergestatemod.ACTION_KEEP_ABSENT in bids:
                repo.ui.note(_(b" %s: picking 'keep absent' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP_ABSENT][0])
                continue
            # ACTION_KEEP_NEW and ACTION_CHANGED_DELETED are conflicting
            # actions: one says the file is new, while the other says the file
            # was present earlier too and has a change/delete conflict.
            # Let's fall back to the conflicting ACTION_CHANGED_DELETED and
            # let the user do the right thing.
            if (
                mergestatemod.ACTION_CHANGED_DELETED in bids
                and mergestatemod.ACTION_KEEP_NEW in bids
            ):
                repo.ui.note(_(b" %s: picking 'changed/deleted' action\n") % f)
                mresult.addfile(
                    f, *bids[mergestatemod.ACTION_CHANGED_DELETED][0]
                )
                continue
            # If keep new is an option, let's just do that
            if mergestatemod.ACTION_KEEP_NEW in bids:
                repo.ui.note(_(b" %s: picking 'keep new' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP_NEW][0])
                continue
            # ACTION_GET and ACTION_DELETED_CHANGED are conflicting actions:
            # one states that the file is newer/created on the remote side,
            # the other that the file is deleted locally and changed on the
            # remote side. Let's fall back and rely on a conflicting action to
            # let the user do the right thing.
            if (
                mergestatemod.ACTION_DELETED_CHANGED in bids
                and mergestatemod.ACTION_GET in bids
            ):
                repo.ui.note(_(b" %s: picking 'delete/changed' action\n") % f)
                mresult.addfile(
                    f, *bids[mergestatemod.ACTION_DELETED_CHANGED][0]
                )
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if mergestatemod.ACTION_GET in bids:
                ga0 = bids[mergestatemod.ACTION_GET][0]
                if all(a == ga0 for a in bids[mergestatemod.ACTION_GET][1:]):
                    repo.ui.note(_(b" %s: picking 'get' action\n") % f)
                    mresult.addfile(f, *ga0)
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(b' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note(b' %s -> %s\n' % (msg, m.__bytes__()))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = list(bids.items())[0]
            repo.ui.warn(
                _(b' %s: ambiguous merge - picked %s action\n')
                % (f, m.__bytes__())
            )
            mresult.addfile(f, *l[0])
            continue
        repo.ui.note(_(b'end of auction\n\n'))
        mresult.updatevalues(diverge, renamedelete)

    if wctx.rev() is None:
        _forgetremoved(wctx, mctx, branchmerge, mresult)

    sparse.filterupdatesactions(repo, wctx, mctx, branchmerge, mresult)
    _resolvetrivial(repo, wctx, mctx, ancestors[0], mresult)

    return mresult


def _getcwd():
    try:
        return encoding.getcwd()
    except FileNotFoundError:
        return None


def batchremove(repo, wctx, actions):
    """apply removes to the working directory

    yields tuples for progress updates
    """
    verbose = repo.ui.verbose
    cwd = _getcwd()
    i = 0
    for f, args, msg in actions:
        repo.ui.debug(b" %s: %s -> r\n" % (f, msg))
        if verbose:
            repo.ui.note(_(b"removing %s\n") % f)
        wctx[f].audit()
        try:
            wctx[f].remove(ignoremissing=True)
        except OSError as inst:
            repo.ui.warn(
                _(b"update failed to remove %s: %s!\n")
                % (f, stringutil.forcebytestr(inst.strerror))
            )
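        # yield progress in batches of roughly 100 files so that the progress
        # bar stays responsive without yielding once per removed file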
        if i == 100:
            yield i, f
            i = 0
        i += 1
    if i > 0:
        yield i, f

    if cwd and not _getcwd():
        # cwd was removed in the course of removing files; print a helpful
        # warning.
        repo.ui.warn(
            _(
                b"current directory was removed\n"
                b"(consider changing to repo root: %s)\n"
            )
            % repo.root
        )


def batchget(repo, mctx, wctx, wantfiledata, actions):
    """apply gets to the working directory

    mctx is the context to get from

    Yields arbitrarily many (False, tuple) for progress updates, followed by
    exactly one (True, filedata). When wantfiledata is false, filedata is an
    empty dict. When wantfiledata is true, filedata[f] is a triple (mode, size,
    mtime) of the file f written for each action.
    """
    filedata = {}
    verbose = repo.ui.verbose
    fctx = mctx.filectx
    ui = repo.ui
    i = 0
    with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
        for f, (flags, backup), msg in actions:
            repo.ui.debug(b" %s: %s -> g\n" % (f, msg))
            if verbose:
                repo.ui.note(_(b"getting %s\n") % f)

            if backup:
                # If a file or directory exists with the same name, back that
                # up. Otherwise, look to see if there is a file that conflicts
                # with a directory this file is in, and if so, back that up.
                conflicting = f
                if not repo.wvfs.lexists(f):
                    for p in pathutil.finddirs(f):
                        if repo.wvfs.isfileorlink(p):
                            conflicting = p
                            break
                if repo.wvfs.lexists(conflicting):
                    orig = scmutil.backuppath(ui, repo, conflicting)
                    util.rename(repo.wjoin(conflicting), orig)
            wfctx = wctx[f]
            wfctx.clearunknown()
            atomictemp = ui.configbool(b"experimental", b"update.atomic-file")
            size = wfctx.write(
                fctx(f).data(),
                flags,
                backgroundclose=True,
                atomictemp=atomictemp,
            )
            if wantfiledata:
                # XXX note that there is a race window between the time we
                # write the clean data into the file and the time we stat it.
                # So another writing process meddling with the file content
                # right after we wrote it could cause bad stat data to be
                # gathered.
                #
                # There are two pieces of data we gather here:
                # - the mode:
                #   We actually just wrote it, so we should not need to read
                #   it from disk (except that not all mode bits might survive
                #   the disk round-trip, which is another issue: we should
                #   not depend on this).
                # - the mtime:
                #   On systems that support nanosecond precision, the mtime
                #   could be accurate enough to tell the two writes apart.
                #   However, gathering it in a racy way makes the mtime we
                #   gather "unreliable".
                #
                # (note: we get the size from the data we write, which is sane)
                #
                # So in theory the data returned here are fully racy, but in
                # practice "it works mostly fine".
                #
                # Do not be surprised if you end up reading this while looking
                # for the causes of some buggy status. Feel free to improve
                # this in the future, but we cannot simply stop gathering
                # information. Otherwise an `hg status` call made after a
                # large `hg update` run would have to redo a similar amount of
                # work to restore and compare all file contents.
                s = wfctx.lstat()
                mode = s.st_mode
                mtime = timestamp.mtime_of(s)
                # for dirstate.update_file's parentfiledata argument:
                filedata[f] = (mode, size, mtime)
            if i == 100:
                yield False, (i, f)
                i = 0
            i += 1
    if i > 0:
        yield False, (i, f)
    yield True, filedata


def _prefetchfiles(repo, ctx, mresult):
    """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
    of merge actions. ``ctx`` is the context being merged in."""

    # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
    # don't touch the context to be merged in. 'cd' is skipped, because
    # changed/deleted never resolves to something from the remote side.
    files = mresult.files(
        [
            mergestatemod.ACTION_GET,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
            mergestatemod.ACTION_MERGE,
        ]
    )

    prefetch = scmutil.prefetchfiles
    matchfiles = scmutil.matchfiles
    prefetch(
        repo,
        [
            (
                ctx.rev(),
                matchfiles(repo, files),
            )
        ],
    )


@attr.s(frozen=True)
class updateresult:
    updatedcount = attr.ib()
    mergedcount = attr.ib()
    removedcount = attr.ib()
    unresolvedcount = attr.ib()

    def isempty(self):
        return not (
            self.updatedcount
            or self.mergedcount
            or self.removedcount
            or self.unresolvedcount
        )
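
# A minimal usage sketch (hypothetical caller, not part of this module): the
# counts object returned by applyupdates() below is typically inspected via
# unresolvedcount, e.g.
#
#   stats, getfiledata, extraactions = applyupdates(...)
#   if stats.unresolvedcount:
#       # leave the merge state in place and ask the user to run `hg resolve`
#       ...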


def applyupdates(
    repo,
    mresult,
    wctx,
    mctx,
    overwrite,
    wantfiledata,
    labels=None,
):
    """apply the merge action list to the working directory

    mresult is a mergeresult object representing the result of the merge
    wctx is the working copy context
    mctx is the context to be merged into the working copy

    Return a tuple of (counts, filedata, extraactions), where counts is a
    tuple (updated, merged, removed, unresolved) that describes how many
    files were affected by the update, filedata is as described in batchget,
    and extraactions is what the merge state's actions() returned.
    """

    _prefetchfiles(repo, mctx, mresult)

    updated, merged, removed = 0, 0, 0
    ms = wctx.mergestate(clean=True)
    ms.start(wctx.p1().node(), mctx.node(), labels)

    for f, op in mresult.commitinfo.items():
        # the other side of the filenode was chosen while merging; store this
        # in mergestate so that it can be reused at commit time
        ms.addcommitinfo(f, op)

    num_no_op = mresult.len(mergestatemod.MergeAction.NO_OP_ACTIONS)
    numupdates = mresult.len() - num_no_op
    progress = repo.ui.makeprogress(
        _(b'updating'), unit=_(b'files'), total=numupdates
    )

    if b'.hgsubstate' in mresult._actionmapping[mergestatemod.ACTION_REMOVE]:
        subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)

    # record path conflicts
    for f, args, msg in mresult.getactions(
        [mergestatemod.ACTION_PATH_CONFLICT], sort=True
    ):
        f1, fo = args
        s = repo.ui.status
        s(
            _(
                b"%s: path conflict - a file or link has the same name as a "
                b"directory\n"
            )
            % f
        )
        if fo == b'l':
            s(_(b"the local file has been renamed to %s\n") % f1)
        else:
            s(_(b"the remote file has been renamed to %s\n") % f1)
        s(_(b"resolve manually then use 'hg resolve --mark %s'\n") % f)
        ms.addpathconflict(f, f1, fo)
        progress.increment(item=f)

    # When merging in-memory, we can't support worker processes, so set the
    # per-item cost at 0 in that case.
    cost = 0 if wctx.isinmemory() else 0.001
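    # (the worker module weighs cost * number of items against its threshold
    # when deciding whether spawning worker processes is worthwhile)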
1579
1579
1580 # remove in parallel (must come before resolving path conflicts and getting)
1580 # remove in parallel (must come before resolving path conflicts and getting)
1581 prog = worker.worker(
1581 prog = worker.worker(
1582 repo.ui,
1582 repo.ui,
1583 cost,
1583 cost,
1584 batchremove,
1584 batchremove,
1585 (repo, wctx),
1585 (repo, wctx),
1586 list(mresult.getactions([mergestatemod.ACTION_REMOVE], sort=True)),
1586 list(mresult.getactions([mergestatemod.ACTION_REMOVE], sort=True)),
1587 )
1587 )
1588 for i, item in prog:
1588 for i, item in prog:
1589 progress.increment(step=i, item=item)
1589 progress.increment(step=i, item=item)
1590 removed = mresult.len((mergestatemod.ACTION_REMOVE,))
1590 removed = mresult.len((mergestatemod.ACTION_REMOVE,))
1591
1591
1592 # resolve path conflicts (must come before getting)
1592 # resolve path conflicts (must come before getting)
1593 for f, args, msg in mresult.getactions(
1593 for f, args, msg in mresult.getactions(
1594 [mergestatemod.ACTION_PATH_CONFLICT_RESOLVE], sort=True
1594 [mergestatemod.ACTION_PATH_CONFLICT_RESOLVE], sort=True
1595 ):
1595 ):
1596 repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
1596 repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
1597 (f0, origf0) = args
1597 (f0, origf0) = args
1598 if wctx[f0].lexists():
1598 if wctx[f0].lexists():
1599 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1599 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1600 wctx[f].audit()
1600 wctx[f].audit()
1601 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1601 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1602 wctx[f0].remove()
1602 wctx[f0].remove()
1603 progress.increment(item=f)
1603 progress.increment(item=f)
1604
1604
1605 # get in parallel.
1605 # get in parallel.
1606 threadsafe = repo.ui.configbool(
1606 threadsafe = repo.ui.configbool(
1607 b'experimental', b'worker.wdir-get-thread-safe'
1607 b'experimental', b'worker.wdir-get-thread-safe'
1608 )
1608 )
1609 prog = worker.worker(
1609 prog = worker.worker(
1610 repo.ui,
1610 repo.ui,
1611 cost,
1611 cost,
1612 batchget,
1612 batchget,
1613 (repo, mctx, wctx, wantfiledata),
1613 (repo, mctx, wctx, wantfiledata),
1614 list(mresult.getactions([mergestatemod.ACTION_GET], sort=True)),
1614 list(mresult.getactions([mergestatemod.ACTION_GET], sort=True)),
1615 threadsafe=threadsafe,
1615 threadsafe=threadsafe,
1616 hasretval=True,
1616 hasretval=True,
1617 )
1617 )
1618 getfiledata = {}
1618 getfiledata = {}
1619 for final, res in prog:
1619 for final, res in prog:
1620 if final:
1620 if final:
1621 getfiledata = res
1621 getfiledata = res
1622 else:
1622 else:
1623 i, item = res
1623 i, item = res
1624 progress.increment(step=i, item=item)
1624 progress.increment(step=i, item=item)
1625
1625
1626 if b'.hgsubstate' in mresult._actionmapping[mergestatemod.ACTION_GET]:
1626 if b'.hgsubstate' in mresult._actionmapping[mergestatemod.ACTION_GET]:
1627 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1627 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1628
1628
1629 # forget (manifest only, just log it) (must come first)
1629 # forget (manifest only, just log it) (must come first)
1630 for f, args, msg in mresult.getactions(
1630 for f, args, msg in mresult.getactions(
1631 (mergestatemod.ACTION_FORGET,), sort=True
1631 (mergestatemod.ACTION_FORGET,), sort=True
1632 ):
1632 ):
1633 repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
1633 repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
1634 progress.increment(item=f)
1634 progress.increment(item=f)
1635
1635
1636 # re-add (manifest only, just log it)
1636 # re-add (manifest only, just log it)
1637 for f, args, msg in mresult.getactions(
1637 for f, args, msg in mresult.getactions(
1638 (mergestatemod.ACTION_ADD,), sort=True
1638 (mergestatemod.ACTION_ADD,), sort=True
1639 ):
1639 ):
1640 repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
1640 repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
1641 progress.increment(item=f)
1641 progress.increment(item=f)
1642
1642
1643 # re-add/mark as modified (manifest only, just log it)
1643 # re-add/mark as modified (manifest only, just log it)
1644 for f, args, msg in mresult.getactions(
1644 for f, args, msg in mresult.getactions(
1645 (mergestatemod.ACTION_ADD_MODIFIED,), sort=True
1645 (mergestatemod.ACTION_ADD_MODIFIED,), sort=True
1646 ):
1646 ):
1647 repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
1647 repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
1648 progress.increment(item=f)
1648 progress.increment(item=f)
1649
1649
1650 # keep (noop, just log it)
1650 # keep (noop, just log it)
1651 for a in mergestatemod.MergeAction.NO_OP_ACTIONS:
1651 for a in mergestatemod.MergeAction.NO_OP_ACTIONS:
1652 for f, args, msg in mresult.getactions((a,), sort=True):
1652 for f, args, msg in mresult.getactions((a,), sort=True):
1653 repo.ui.debug(b" %s: %s -> %s\n" % (f, msg, a.__bytes__()))
1653 repo.ui.debug(b" %s: %s -> %s\n" % (f, msg, a.__bytes__()))
1654 # no progress
1654 # no progress
1655
1655
1656 # directory rename, move local
1656 # directory rename, move local
1657 for f, args, msg in mresult.getactions(
1657 for f, args, msg in mresult.getactions(
1658 (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,), sort=True
1658 (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,), sort=True
1659 ):
1659 ):
1660 repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
1660 repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
1661 progress.increment(item=f)
1661 progress.increment(item=f)
1662 f0, flags = args
1662 f0, flags = args
1663 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1663 repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
1664 wctx[f].audit()
1664 wctx[f].audit()
1665 wctx[f].write(wctx.filectx(f0).data(), flags)
1665 wctx[f].write(wctx.filectx(f0).data(), flags)
1666 wctx[f0].remove()
1666 wctx[f0].remove()
1667
1667
1668 # local directory rename, get
1668 # local directory rename, get
1669 for f, args, msg in mresult.getactions(
1669 for f, args, msg in mresult.getactions(
1670 (mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,), sort=True
1670 (mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,), sort=True
1671 ):
1671 ):
1672 repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
1672 repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
1673 progress.increment(item=f)
1673 progress.increment(item=f)
1674 f0, flags = args
1674 f0, flags = args
1675 repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
1675 repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
1676 wctx[f].write(mctx.filectx(f0).data(), flags)
1676 wctx[f].write(mctx.filectx(f0).data(), flags)
1677
1677
1678 # exec
1678 # exec
1679 for f, args, msg in mresult.getactions(
1679 for f, args, msg in mresult.getactions(
1680 (mergestatemod.ACTION_EXEC,), sort=True
1680 (mergestatemod.ACTION_EXEC,), sort=True
1681 ):
1681 ):
1682 repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
1682 repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
1683 progress.increment(item=f)
1683 progress.increment(item=f)
1684 (flags,) = args
1684 (flags,) = args
1685 wctx[f].audit()
1685 wctx[f].audit()
1686 wctx[f].setflags(b'l' in flags, b'x' in flags)
1686 wctx[f].setflags(b'l' in flags, b'x' in flags)
1687
1687
1688 moves = []
1688 moves = []
1689
1689
1690 # 'cd' and 'dc' actions are treated like other merge conflicts
1690 # 'cd' and 'dc' actions are treated like other merge conflicts
1691 mergeactions = list(
1691 mergeactions = list(
1692 mresult.getactions(
1692 mresult.getactions(
1693 [
1693 [
1694 mergestatemod.ACTION_CHANGED_DELETED,
1694 mergestatemod.ACTION_CHANGED_DELETED,
1695 mergestatemod.ACTION_DELETED_CHANGED,
1695 mergestatemod.ACTION_DELETED_CHANGED,
1696 mergestatemod.ACTION_MERGE,
1696 mergestatemod.ACTION_MERGE,
1697 ],
1697 ],
1698 sort=True,
1698 sort=True,
1699 )
1699 )
1700 )
1700 )
1701 for f, args, msg in mergeactions:
1701 for f, args, msg in mergeactions:
1702 f1, f2, fa, move, anc = args
1702 f1, f2, fa, move, anc = args
1703 if f == b'.hgsubstate': # merged internally
1703 if f == b'.hgsubstate': # merged internally
1704 continue
1704 continue
1705 if f1 is None:
1705 if f1 is None:
1706 fcl = filemerge.absentfilectx(wctx, fa)
1706 fcl = filemerge.absentfilectx(wctx, fa)
1707 else:
1707 else:
1708 repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
1708 repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
1709 fcl = wctx[f1]
1709 fcl = wctx[f1]
1710 if f2 is None:
1710 if f2 is None:
1711 fco = filemerge.absentfilectx(mctx, fa)
1711 fco = filemerge.absentfilectx(mctx, fa)
1712 else:
1712 else:
1713 fco = mctx[f2]
1713 fco = mctx[f2]
1714 actx = repo[anc]
1714 actx = repo[anc]
1715 if fa in actx:
1715 if fa in actx:
1716 fca = actx[fa]
1716 fca = actx[fa]
1717 else:
1717 else:
1718 # TODO: move to absentfilectx
1718 # TODO: move to absentfilectx
1719 fca = repo.filectx(f1, fileid=nullrev)
1719 fca = repo.filectx(f1, fileid=nullrev)
1720 ms.add(fcl, fco, fca, f)
1720 ms.add(fcl, fco, fca, f)
1721 if f1 != f and move:
1721 if f1 != f and move:
1722 moves.append(f1)
1722 moves.append(f1)
1723
1723
1724 # remove renamed files after safely stored
1724 # remove renamed files after safely stored
1725 for f in moves:
1725 for f in moves:
1726 if wctx[f].lexists():
1726 if wctx[f].lexists():
1727 repo.ui.debug(b"removing %s\n" % f)
1727 repo.ui.debug(b"removing %s\n" % f)
1728 wctx[f].audit()
1728 wctx[f].audit()
1729 wctx[f].remove()
1729 wctx[f].remove()
1730
1730
1731 # these actions updates the file
1731 # these actions updates the file
1732 updated = mresult.len(
1732 updated = mresult.len(
1733 (
1733 (
1734 mergestatemod.ACTION_GET,
1734 mergestatemod.ACTION_GET,
1735 mergestatemod.ACTION_EXEC,
1735 mergestatemod.ACTION_EXEC,
1736 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1736 mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
1737 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
1737 mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
1738 )
1738 )
1739 )
1739 )
1740
1740
1741 try:
1741 try:
1742 for f, args, msg in mergeactions:
1742 for f, args, msg in mergeactions:
1743 repo.ui.debug(b" %s: %s -> m\n" % (f, msg))
1743 repo.ui.debug(b" %s: %s -> m\n" % (f, msg))
1744 ms.addcommitinfo(f, {b'merged': b'yes'})
1744 ms.addcommitinfo(f, {b'merged': b'yes'})
1745 progress.increment(item=f)
1745 progress.increment(item=f)
1746 if f == b'.hgsubstate': # subrepo states need updating
1746 if f == b'.hgsubstate': # subrepo states need updating
1747 subrepoutil.submerge(
1747 subrepoutil.submerge(
1748 repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
1748 repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
1749 )
1749 )
1750 continue
1750 continue
1751 wctx[f].audit()
1751 wctx[f].audit()
1752 ms.resolve(f, wctx)
1752 ms.resolve(f, wctx)
1753
1753
1754 except error.InterventionRequired:
1754 except error.InterventionRequired:
1755 # If the user has merge.on-failure=halt, catch the error and close the
1755 # If the user has merge.on-failure=halt, catch the error and close the
1756 # merge state "properly".
1756 # merge state "properly".
1757 pass
1757 pass
1758 finally:
1758 finally:
1759 ms.commit()
1759 ms.commit()
1760
1760
1761 unresolved = ms.unresolvedcount()
1761 unresolved = ms.unresolvedcount()
1762
1762
1763 msupdated, msmerged, msremoved = ms.counts()
1763 msupdated, msmerged, msremoved = ms.counts()
1764 updated += msupdated
1764 updated += msupdated
1765 merged += msmerged
1765 merged += msmerged
1766 removed += msremoved
1766 removed += msremoved
1767
1767
1768 extraactions = ms.actions()
1768 extraactions = ms.actions()
1769
1769
1770 progress.complete()
1770 progress.complete()
1771 return (
1771 return (
1772 updateresult(updated, merged, removed, unresolved),
1772 updateresult(updated, merged, removed, unresolved),
1773 getfiledata,
1773 getfiledata,
1774 extraactions,
1774 extraactions,
1775 )
1775 )
1776
1776
1777
1777
def _advertisefsmonitor(repo, num_gets, p1node):
    # Advertise fsmonitor when its presence could be useful.
    #
    # We only advertise when performing an update from an empty working
    # directory. This typically only occurs during initial clone.
    #
    # We give users a mechanism to disable the warning in case it is
    # annoying.
    #
    # We only show the warning on Linux and macOS because that's where
    # fsmonitor is considered stable.
    fsmonitorwarning = repo.ui.configbool(b'fsmonitor', b'warn_when_unused')
    fsmonitorthreshold = repo.ui.configint(
        b'fsmonitor', b'warn_update_file_count'
    )
    # avoid cycle dirstate -> sparse -> merge -> dirstate
    dirstate_rustmod = policy.importrust("dirstate")

    if dirstate_rustmod is not None:
        # When using rust status, fsmonitor becomes necessary at higher sizes
        fsmonitorthreshold = repo.ui.configint(
            b'fsmonitor',
            b'warn_update_file_count_rust',
        )

    try:
        # avoid cycle: extensions -> cmdutil -> merge
        from . import extensions

        extensions.find(b'fsmonitor')
        fsmonitorenabled = repo.ui.config(b'fsmonitor', b'mode') != b'off'
        # We intentionally don't look at whether fsmonitor has disabled
        # itself because a) fsmonitor may have already printed a warning
        # b) we only care about the config state here.
    except KeyError:
        fsmonitorenabled = False

    if (
        fsmonitorwarning
        and not fsmonitorenabled
        and p1node == repo.nullid
        and num_gets >= fsmonitorthreshold
        and pycompat.sysplatform.startswith((b'linux', b'darwin'))
    ):
        repo.ui.warn(
            _(
                b'(warning: large working directory being used without '
                b'fsmonitor enabled; enable fsmonitor to improve performance; '
                b'see "hg help -e fsmonitor")\n'
            )
        )


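# For reference, the settings consulted above live in the [fsmonitor] section
# of an hgrc. A hedged configuration sketch (the values shown are purely
# illustrative, not recommendations):
#
#   [fsmonitor]
#   mode = on
#   warn_when_unused = True
#   warn_update_file_count = 50000

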
UPDATECHECK_ABORT = b'abort'  # handled at higher layers
UPDATECHECK_NONE = b'none'
UPDATECHECK_LINEAR = b'linear'
UPDATECHECK_NO_CONFLICT = b'noconflict'

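# These constants are the accepted values of the experimental.updatecheck
# config option mentioned in _update()'s docstring below; UPDATECHECK_ABORT
# never reaches _update() because it is handled at higher layers.

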
def _update(
    repo,
    node,
    branchmerge,
    force,
    ancestor=None,
    mergeancestor=False,
    labels=None,
    matcher=None,
    mergeforce=False,
    updatedirstate=True,
    updatecheck=None,
    wc=None,
):
1851 """
1851 """
1852 Perform a merge between the working directory and the given node
1852 Perform a merge between the working directory and the given node
1853
1853
1854 node = the node to update to
1854 node = the node to update to
1855 branchmerge = whether to merge between branches
1855 branchmerge = whether to merge between branches
1856 force = whether to force branch merging or file overwriting
1856 force = whether to force branch merging or file overwriting
1857 matcher = a matcher to filter file lists (dirstate not updated)
1857 matcher = a matcher to filter file lists (dirstate not updated)
1858 mergeancestor = whether it is merging with an ancestor. If true,
1858 mergeancestor = whether it is merging with an ancestor. If true,
1859 we should accept the incoming changes for any prompts that occur.
1859 we should accept the incoming changes for any prompts that occur.
1860 If false, merging with an ancestor (fast-forward) is only allowed
1860 If false, merging with an ancestor (fast-forward) is only allowed
1861 between different named branches. This flag is used by rebase extension
1861 between different named branches. This flag is used by rebase extension
1862 as a temporary fix and should be avoided in general.
1862 as a temporary fix and should be avoided in general.
1863 labels = labels to use for local, other, and base
1863 labels = labels to use for local, other, and base
1864 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1864 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1865 this is True, then 'force' should be True as well.
1865 this is True, then 'force' should be True as well.
1866
1866
1867 The table below shows all the behaviors of the update command given the
1867 The table below shows all the behaviors of the update command given the
1868 -c/--check and -C/--clean or no options, whether the working directory is
1868 -c/--check and -C/--clean or no options, whether the working directory is
1869 dirty, whether a revision is specified, and the relationship of the parent
1869 dirty, whether a revision is specified, and the relationship of the parent
1870 rev to the target rev (linear or not). Match from top first. The -n
1870 rev to the target rev (linear or not). Match from top first. The -n
1871 option doesn't exist on the command line, but represents the
1871 option doesn't exist on the command line, but represents the
1872 experimental.updatecheck=noconflict option.
1872 experimental.updatecheck=noconflict option.
1873
1873
1874 This logic is tested by test-update-branches.t.
1874 This logic is tested by test-update-branches.t.
1875
1875
1876 -c -C -n -m dirty rev linear | result
1876 -c -C -n -m dirty rev linear | result
1877 y y * * * * * | (1)
1877 y y * * * * * | (1)
1878 y * y * * * * | (1)
1878 y * y * * * * | (1)
1879 y * * y * * * | (1)
1879 y * * y * * * | (1)
1880 * y y * * * * | (1)
1880 * y y * * * * | (1)
1881 * y * y * * * | (1)
1881 * y * y * * * | (1)
1882 * * y y * * * | (1)
1882 * * y y * * * | (1)
1883 * * * * * n n | x
1883 * * * * * n n | x
1884 * * * * n * * | ok
1884 * * * * n * * | ok
1885 n n n n y * y | merge
1885 n n n n y * y | merge
1886 n n n n y y n | (2)
1886 n n n n y y n | (2)
1887 n n n y y * * | merge
1887 n n n y y * * | merge
1888 n n y n y * * | merge if no conflict
1888 n n y n y * * | merge if no conflict
1889 n y n n y * * | discard
1889 n y n n y * * | discard
1890 y n n n y * * | (3)
1890 y n n n y * * | (3)
1891
1891
1892 x = can't happen
1892 x = can't happen
1893 * = don't-care
1893 * = don't-care
1894 1 = incompatible options (checked in commands.py)
1894 1 = incompatible options (checked in commands.py)
1895 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1895 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1896 3 = abort: uncommitted changes (checked in commands.py)
1896 3 = abort: uncommitted changes (checked in commands.py)
1897
1897
1898 The merge is performed inside ``wc``, a workingctx-like objects. It defaults
1898 The merge is performed inside ``wc``, a workingctx-like objects. It defaults
1899 to repo[None] if None is passed.
1899 to repo[None] if None is passed.
1900
1900
1901 Return the same tuple as applyupdates().
1901 Return the same tuple as applyupdates().
1902 """
1902 """
    # Avoid cycle.
    from . import sparse

    # This function used to find the default destination if node was None, but
    # that's now in destutil.py.
    assert node is not None
    if not branchmerge and not force:
        # TODO: remove the default once all callers that pass branchmerge=False
        # and force=False pass a value for updatecheck. We may want to allow
        # updatecheck='abort' to better support some of these callers.
        if updatecheck is None:
            updatecheck = UPDATECHECK_LINEAR
        okay = (UPDATECHECK_NONE, UPDATECHECK_LINEAR, UPDATECHECK_NO_CONFLICT)
        if updatecheck not in okay:
            msg = r'Invalid updatecheck %r (can accept %r)'
            msg %= (updatecheck, okay)
            raise ValueError(msg)
    if wc is not None and wc.isinmemory():
        maybe_wlock = util.nullcontextmanager()
    else:
        maybe_wlock = repo.wlock()
    with maybe_wlock:
        if wc is None:
            wc = repo[None]
        pl = wc.parents()
        p1 = pl[0]
        p2 = repo[node]
        if ancestor is not None:
            pas = [repo[ancestor]]
        else:
            if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']:
                cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
                pas = [repo[anc] for anc in (sorted(cahs) or [repo.nullid])]
            else:
                pas = [p1.ancestor(p2, warn=branchmerge)]

        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)

        overwrite = force and not branchmerge
        ### check phase
        if not overwrite:
            if len(pl) > 1:
                raise error.StateError(_(b"outstanding uncommitted merge"))
            ms = wc.mergestate()
            if ms.unresolvedcount():
                msg = _(b"outstanding merge conflicts")
                hint = _(b"use 'hg resolve' to resolve")
                raise error.StateError(msg, hint=hint)
        if branchmerge:
            m_a = _(b"merging with a working directory ancestor has no effect")
            if pas == [p2]:
                raise error.Abort(m_a)
            elif pas == [p1]:
                if not mergeancestor and wc.branch() == p2.branch():
                    msg = _(b"nothing to merge")
                    hint = _(b"use 'hg update' or check 'hg heads'")
                    raise error.Abort(msg, hint=hint)
            if not force and (wc.files() or wc.deleted()):
                msg = _(b"uncommitted changes")
                hint = _(b"use 'hg status' to list changes")
                raise error.StateError(msg, hint=hint)
            if not wc.isinmemory():
                for s in sorted(wc.substate):
                    wc.sub(s).bailifchanged()

        elif not overwrite:
            if p1 == p2:  # no-op update
                # call the hooks and exit early
                repo.hook(b'preupdate', throw=True, parent1=xp2, parent2=b'')
                repo.hook(b'update', parent1=xp2, parent2=b'', error=0)
                return updateresult(0, 0, 0, 0)

            if updatecheck == UPDATECHECK_LINEAR and pas not in (
                [p1],
                [p2],
            ):  # nonlinear
                dirty = wc.dirty(missing=True)
                if dirty:
                    # Branching is a bit strange to ensure we do the minimal
                    # number of calls to obsutil.foreground.
                    foreground = obsutil.foreground(repo, [p1.node()])
                    # note: the <node> variable contains a random identifier
                    if repo[node].node() in foreground:
                        pass  # allow updating to successors
                    else:
                        msg = _(b"uncommitted changes")
                        hint = _(b"commit or update --clean to discard changes")
                        raise error.UpdateAbort(msg, hint=hint)
                else:
                    # Allow jumping branches if clean and specific rev given
                    pass

        if overwrite:
            pas = [wc]
        elif not branchmerge:
            pas = [p1]

        # deprecated config: merge.followcopies
        followcopies = repo.ui.configbool(b'merge', b'followcopies')
        if overwrite:
            followcopies = False
        elif not pas[0]:
            followcopies = False
        if not branchmerge and not wc.dirty(missing=True):
            followcopies = False

        ### calculate phase
        mresult = calculateupdates(
            repo,
            wc,
            p2,
            pas,
            branchmerge,
            force,
            mergeancestor,
            followcopies,
            matcher=matcher,
            mergeforce=mergeforce,
        )

        if updatecheck == UPDATECHECK_NO_CONFLICT:
            if mresult.hasconflicts():
                msg = _(b"conflicting changes")
                hint = _(b"commit or update --clean to discard changes")
                raise error.StateError(msg, hint=hint)

        # Prompt and create actions. Most of this is in the resolve phase
        # already, but we can't handle .hgsubstate in filemerge or
        # subrepoutil.submerge yet so we have to keep prompting for it.
        vals = mresult.getfile(b'.hgsubstate')
        if vals:
            f = b'.hgsubstate'
            m, args, msg = vals
            prompts = filemerge.partextras(labels)
            prompts[b'f'] = f
            if m == mergestatemod.ACTION_CHANGED_DELETED:
                if repo.ui.promptchoice(
                    _(
                        b"local%(l)s changed %(f)s which other%(o)s deleted\n"
                        b"use (c)hanged version or (d)elete?"
                        b"$$ &Changed $$ &Delete"
                    )
                    % prompts,
                    0,
                ):
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_REMOVE,
                        None,
                        b'prompt delete',
                    )
                elif f in p1:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_ADD_MODIFIED,
                        None,
                        b'prompt keep',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_ADD,
                        None,
                        b'prompt keep',
                    )
            elif m == mergestatemod.ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = args
                flags = p2[f2].flags()
                if (
                    repo.ui.promptchoice(
                        _(
                            b"other%(o)s changed %(f)s which local%(l)s deleted\n"
                            b"use (c)hanged version or leave (d)eleted?"
                            b"$$ &Changed $$ &Deleted"
                        )
                        % prompts,
                        0,
                    )
                    == 0
                ):
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_GET,
                        (flags, False),
                        b'prompt recreating',
                    )
                else:
                    mresult.removefile(f)

        if not util.fscasesensitive(repo.path):
            # check collision between files only in p2 for clean update
            if not branchmerge and (
                force or not wc.dirty(missing=True, branch=False)
            ):
                _checkcollision(repo, p2.manifest(), None)
            else:
                _checkcollision(repo, wc.manifest(), mresult)

        # divergent renames
        for f, fl in sorted(mresult.diverge.items()):
            repo.ui.warn(
                _(
                    b"note: possible conflict - %s was renamed "
                    b"multiple times to:\n"
                )
                % f
            )
            for nf in sorted(fl):
                repo.ui.warn(b" %s\n" % nf)

        # rename and delete
        for f, fl in sorted(mresult.renamedelete.items()):
            repo.ui.warn(
                _(
                    b"note: possible conflict - %s was deleted "
                    b"and renamed to:\n"
                )
                % f
            )
            for nf in sorted(fl):
                repo.ui.warn(b" %s\n" % nf)

        ### apply phase
        if not branchmerge:  # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, repo.nullid, xp2, b''
        # If we're doing a partial update, we need to skip updating
        # the dirstate.
        always = matcher is None or matcher.always()
        updatedirstate = updatedirstate and always and not wc.isinmemory()
        if updatedirstate:
            repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
            # note that we're in the middle of an update
            repo.vfs.write(b'updatestate', p2.hex())

        _advertisefsmonitor(
            repo, mresult.len((mergestatemod.ACTION_GET,)), p1.node()
        )

        wantfiledata = updatedirstate and not branchmerge
        stats, getfiledata, extraactions = applyupdates(
            repo,
            mresult,
            wc,
            p2,
            overwrite,
            wantfiledata,
            labels=labels,
        )

        if updatedirstate:
            if extraactions:
                for k, acts in extraactions.items():
                    for a in acts:
                        mresult.addfile(a[0], k, *a[1:])
                    if k == mergestatemod.ACTION_GET and wantfiledata:
                        # no filedata until mergestate is updated to provide it
                        for a in acts:
                            getfiledata[a[0]] = None

            assert len(getfiledata) == (
                mresult.len((mergestatemod.ACTION_GET,)) if wantfiledata else 0
            )
            with repo.dirstate.changing_parents(repo):
                ### Filter Filedata
                #
                # We gathered "cache" information for the clean files while
                # updating them: mtime, size and mode.
                #
                # At the time this comment is written, there are various
                # issues with how we gather the `mode` and `mtime` information
                # (see the comment in `batchget`).
                #
                # We are going to smooth over one of these issues here: mtime
                # ambiguity.
                #
                # i.e. even if the mtime gathered during `batchget` was
                # correct[1], a change happening right after it could alter
                # the content while keeping the same mtime[2].
                #
                # When we reach the current code, the "on disk" part of the
                # update operation is finished. We still assume that no other
                # process raced that "on disk" part, but we want to at least
                # prevent a later file change from altering the content of the
                # file right after the update operation, so quickly that the
                # same mtime is recorded for the operation.
                # To prevent such ambiguity from happening, we will only keep
                # the "file data" for files whose mtime is strictly in the
                # past, i.e. strictly lower than the current time.
                #
                # This protects us from race conditions with operations that
                # could run right after this one, especially other Mercurial
                # operations that could be waiting for the wlock to touch file
                # contents and the dirstate.
                #
                # In an ideal world, we would only use reliable information
                # from `getfiledata` (gathered by `batchget`); however, the
                # current approach has been a successful compromise for many
                # years.
                #
                # At the time this comment is written, not using any "cache"
                # file data at all here would not be viable, as it would
                # result in a very large amount of work during the next status
                # after an update (equivalent to redoing the previous
                # `hg update`).
                #
                # [1] the current code cannot guarantee that the `mtime` and
                # `mode` are correct, but the result is "okay in practice"
                # (see the comment in `batchget`).
                #
                # [2] using nano-second precision can greatly help here
                # because it makes the "different write with same mtime" issue
                # virtually vanish. However, dirstate v1 cannot store such
                # precision, and a bunch of python-runtime, operating-system
                # and filesystem combinations do not provide us with such
                # precision, so we have to operate as if it wasn't available.
                if getfiledata:
                    ambiguous_mtime = {}
                    now = timestamp.get_fs_now(repo.vfs)
                    if now is None:
                        # we can't write to the FS, so we won't actually update
                        # the dirstate content anyway, no need to put cache
                        # information.
                        getfiledata = None
                    else:
                        now_sec = now[0]
                        for f, m in getfiledata.items():
                            if m is not None and m[2][0] >= now_sec:
                                ambiguous_mtime[f] = (m[0], m[1], None)
                        for f, m in ambiguous_mtime.items():
                            getfiledata[f] = m

                repo.setparents(fp1, fp2)
                mergestatemod.recordupdates(
                    repo, mresult.actionsdict, branchmerge, getfiledata
                )
                # update completed, clear state
                util.unlink(repo.vfs.join(b'updatestate'))

                if not branchmerge:
                    repo.dirstate.setbranch(
                        p2.branch(), repo.currenttransaction()
                    )

        # If we're updating to a location, clean up any stale temporary includes
        # (ex: this happens during hg rebase --abort).
        if not branchmerge:
            sparse.prunetemporaryincludes(repo)

        if updatedirstate:
            repo.hook(
                b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
            )
        return stats


def merge(ctx, labels=None, force=False, wc=None):
    """Merge another topological branch into the working copy.

    force = whether the merge was run with 'merge --force' (deprecated)
    """

    return _update(
        ctx.repo(),
        ctx.rev(),
        labels=labels,
        branchmerge=True,
        force=force,
        mergeforce=force,
        wc=wc,
    )


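# A minimal usage sketch, assuming an existing `repo` and the node of a
# second head `other` (both hypothetical names):
#
#   stats = merge(repo[other])
#   if stats.unresolvedcount:
#       pass  # conflicts are left for the user, e.g. via 'hg resolve'

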
def update(ctx, updatecheck=None, wc=None):
    """Do a regular update to the given commit, aborting if there are conflicts.

    The 'updatecheck' argument can be used to control what to do in case of
    conflicts.

    Note: This is a new, higher-level update() than the one that used to exist
    in this module. That function is now called _update(). Callers can
    hopefully be migrated to this new update(), or to clean_update(), merge(),
    revert_to(), or graft().
    """
    return _update(
        ctx.repo(),
        ctx.rev(),
        branchmerge=False,
        force=False,
        labels=[b'working copy', b'destination', b'working copy parent'],
        updatecheck=updatecheck,
        wc=wc,
    )


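# A minimal usage sketch, assuming an existing `repo`: update the working
# copy to the head of the default branch. As the docstring notes, conflicts
# abort by default; one of the UPDATECHECK_* constants above changes that:
#
#   update(repo[b'default'])
#   update(repo[b'default'], updatecheck=UPDATECHECK_NONE)

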
def clean_update(ctx, wc=None):
    """Do a clean update to the given commit.

    This involves updating to the commit and discarding any changes in the
    working copy.
    """
    return _update(ctx.repo(), ctx.rev(), branchmerge=False, force=True, wc=wc)


def revert_to(ctx, matcher=None, wc=None):
    """Revert the working copy to the given commit.

    The working copy will keep its current parent(s) but its content will
    be the same as in the given commit.
    """

    return _update(
        ctx.repo(),
        ctx.rev(),
        branchmerge=False,
        force=True,
        updatedirstate=False,
        matcher=matcher,
        wc=wc,
    )


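# A hedged sketch contrasting the two helpers above: clean_update() moves the
# working copy to `ctx` and discards local changes (like 'hg update --clean'),
# while revert_to() keeps the current parents but makes the content match
# `ctx` (like 'hg revert --all -r'):
#
#   clean_update(repo[b'tip'])
#   revert_to(repo[b'tip'])

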
def graft(
    repo,
    ctx,
    base=None,
    labels=None,
    keepparent=False,
    keepconflictparent=False,
    wctx=None,
):
    """Do a graft-like merge.

    This is a merge where the merge ancestor is chosen such that one
    or more changesets are grafted onto the current changeset. In
    addition to the merge, this fixes up the dirstate to include only
    a single parent (if keepparent is False) and tries to duplicate any
    renames/copies appropriately.

    ctx - changeset to rebase
    base - merge base, or ctx.p1() if not specified
    labels - merge labels, e.g. ['local', 'graft']
    keepparent - keep second parent if any
    keepconflictparent - if unresolved, keep parent used for the merge

    """
    # If we're grafting a descendant onto an ancestor, be sure to pass
    # mergeancestor=True to update. This does two things: 1) allows the merge if
    # the destination is the same as the parent of the ctx (so we can use graft
    # to copy commits), and 2) informs update that the incoming changes are
    # newer than the destination so it doesn't prompt about "remote changed foo
    # which local deleted".
    # We also pass mergeancestor=True when base is the same revision as p1. 2)
    # doesn't matter as there can't possibly be conflicts, but 1) is necessary.
    wctx = wctx or repo[None]
    pctx = wctx.p1()
    base = base or ctx.p1()
    mergeancestor = (
        repo.changelog.isancestor(pctx.node(), ctx.node())
        or pctx.rev() == base.rev()
    )

    stats = _update(
        repo,
        ctx.node(),
        True,
        True,
        base.node(),
        mergeancestor=mergeancestor,
        labels=labels,
        wc=wctx,
    )

    if keepconflictparent and stats.unresolvedcount:
        pother = ctx.node()
    else:
        pother = repo.nullid
        parents = ctx.parents()
        if keepparent and len(parents) == 2 and base in parents:
            parents.remove(base)
            pother = parents[0].node()
    # Never set both parents equal to each other
    if pother == pctx.node():
        pother = repo.nullid

    if wctx.isinmemory():
        wctx.setparents(pctx.node(), pother)
        # fix up dirstate for copies and renames
        copies.graftcopies(wctx, ctx, base)
    else:
        with repo.dirstate.changing_parents(repo):
            repo.setparents(pctx.node(), pother)
            repo.dirstate.write(repo.currenttransaction())
            # fix up dirstate for copies and renames
            copies.graftcopies(wctx, ctx, base)
    return stats


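# A minimal usage sketch, assuming an existing `repo` and a changeset `ctx`
# to graft onto the working copy parent (labels follow the docstring's
# example):
#
#   stats = graft(repo, ctx, labels=[b'local', b'graft'])
#   if stats.unresolvedcount:
#       pass  # resolve conflicts, then commit the grafted changes

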
def back_out(ctx, parent=None, wc=None):
    if parent is None:
        if ctx.p2() is not None:
            msg = b"must specify parent of merge commit to back out"
            raise error.ProgrammingError(msg)
        parent = ctx.p1()
    return _update(
        ctx.repo(),
        parent,
        branchmerge=True,
        force=True,
        ancestor=ctx.node(),
        mergeancestor=False,
    )


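# A minimal usage sketch, assuming an existing `repo`: back_out() merges the
# working copy with the parent of `ctx`, using `ctx` itself as the ancestor,
# so the result undoes the changes `ctx` introduced:
#
#   stats = back_out(repo[b'.'])

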
def purge(
    repo,
    matcher,
    unknown=True,
    ignored=False,
    removeemptydirs=True,
    removefiles=True,
    abortonerror=False,
    noop=False,
    confirm=False,
):
    """Purge the working directory of untracked files.

    ``matcher`` is a matcher configured to scan the working directory -
    potentially a subset.

    ``unknown`` controls whether unknown files should be purged.

    ``ignored`` controls whether ignored files should be purged.

    ``removeemptydirs`` controls whether empty directories should be removed.

    ``removefiles`` controls whether files are removed.

    ``abortonerror`` causes an exception to be raised if an error occurs
    deleting a file or directory.

    ``noop`` controls whether files are actually removed; when set, nothing
    is deleted and the function only reports what would be removed.

    ``confirm`` asks for confirmation before actually removing anything.

    Returns an iterable of relative paths in the working directory that were
    or would be removed.
    """

    def remove(removefn, path):
        try:
            removefn(path)
        except OSError:
            m = _(b'%s cannot be removed') % path
            if abortonerror:
                raise error.Abort(m)
            else:
                repo.ui.warn(_(b'warning: %s\n') % m)

    # There's no API to copy a matcher. So mutate the passed matcher and
    # restore it when we're done.
    oldtraversedir = matcher.traversedir

    res = []

    try:
        if removeemptydirs:
            directories = []
            matcher.traversedir = directories.append

        status = repo.status(match=matcher, ignored=ignored, unknown=unknown)

        if confirm:
            msg = None
            nb_ignored = len(status.ignored)
            nb_unknown = len(status.unknown)
            if nb_unknown and nb_ignored:
                msg = _(b"permanently delete %d unknown and %d ignored files?")
                msg %= (nb_unknown, nb_ignored)
            elif nb_unknown:
                msg = _(b"permanently delete %d unknown files?")
                msg %= nb_unknown
            elif nb_ignored:
                msg = _(b"permanently delete %d ignored files?")
                msg %= nb_ignored
            elif removeemptydirs:
                dir_count = 0
                for f in directories:
                    if matcher(f) and not repo.wvfs.listdir(f):
                        dir_count += 1
                if dir_count:
                    msg = _(
                        b"permanently delete at least %d empty directories?"
                    )
                    msg %= dir_count
            if msg is None:
                return res
            else:
                msg += b" (yN)$$ &Yes $$ &No"
                if repo.ui.promptchoice(msg, default=1) == 1:
                    raise error.CanceledError(_(b'removal cancelled'))

        if removefiles:
            for f in sorted(status.unknown + status.ignored):
                if not noop:
                    repo.ui.note(_(b'removing file %s\n') % f)
                    remove(repo.wvfs.unlink, f)
                res.append(f)

        if removeemptydirs:
            for f in sorted(directories, reverse=True):
                if matcher(f) and not repo.wvfs.listdir(f):
                    if not noop:
                        repo.ui.note(_(b'removing directory %s\n') % f)
                        remove(repo.wvfs.rmdir, f)
                    res.append(f)

        return res

    finally:
        matcher.traversedir = oldtraversedir
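
# A minimal usage sketch, assuming an existing `repo` and a matcher built
# with scmutil.match (hypothetical setup): first list what would be deleted,
# then delete for real behind a confirmation prompt:
#
#   m = scmutil.match(repo[None])
#   would_remove = purge(repo, m, ignored=True, noop=True)
#   purge(repo, m, ignored=True, confirm=True)

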
# coding: utf-8
# metadata.py -- code related to various metadata computation and access.
#
# Copyright 2019 Google, Inc <martinvonz@google.com>
# Copyright 2020 Pierre-Yves David <pierre-yves.david@octobus.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import multiprocessing
import struct

from .node import nullrev
from . import (
    error,
    util,
)

from .revlogutils import (
    flagutil as sidedataflag,
    sidedata as sidedatamod,
)


class ChangingFiles:
    """A class recording the changes made to files by a changeset

    Actions performed on files are gathered into 5 sets:

    - added: files actively added in the changeset.
    - merged: files whose history got merged
    - removed: files removed in the revision
    - salvaged: files that might have been deleted by a merge but were not
    - touched: files affected by the merge

    and copies information is held by 2 mappings

    - copied_from_p1: {"<new-name>": "<source-name-in-p1>"} mapping for copies
    - copied_from_p2: {"<new-name>": "<source-name-in-p2>"} mapping for copies

    See their inline help for details.
    """

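    # A minimal usage sketch: recording that a changeset added `a`, removed
    # `b`, and copied `c` to `d` relative to p1 (file names are hypothetical):
    #
    #   cf = ChangingFiles()
    #   cf.mark_added(b'a')
    #   cf.mark_removed(b'b')
    #   cf.mark_copied_from_p1(b'c', b'd')
    #   assert b'a' in cf.touched and b'b' in cf.removed
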
    def __init__(
        self,
        touched=None,
        added=None,
        removed=None,
        merged=None,
        salvaged=None,
        p1_copies=None,
        p2_copies=None,
    ):
        self._added = set(() if added is None else added)
        self._merged = set(() if merged is None else merged)
        self._removed = set(() if removed is None else removed)
        self._touched = set(() if touched is None else touched)
        self._salvaged = set(() if salvaged is None else salvaged)
        self._touched.update(self._added)
        self._touched.update(self._merged)
        self._touched.update(self._removed)
        self._p1_copies = dict(() if p1_copies is None else p1_copies)
        self._p2_copies = dict(() if p2_copies is None else p2_copies)

    def __eq__(self, other):
        return (
            self.added == other.added
            and self.merged == other.merged
            and self.removed == other.removed
            and self.salvaged == other.salvaged
            and self.touched == other.touched
            and self.copied_from_p1 == other.copied_from_p1
            and self.copied_from_p2 == other.copied_from_p2
        )

    @property
    def has_copies_info(self):
        return bool(
            self.removed
            or self.merged
            or self.salvaged
            or self.copied_from_p1
            or self.copied_from_p2
        )

    @util.propertycache
    def added(self):
        """files actively added in the changeset

        Any file present in that revision that was absent in all the
        changeset's parents.

        In case of merge, this means a file absent in one of the parents but
        existing in the other will *not* be contained in this set. (They were
        added by an ancestor)
        """
        return frozenset(self._added)

    def mark_added(self, filename):
        if 'added' in vars(self):
            del self.added
        self._added.add(filename)
        self.mark_touched(filename)

    def update_added(self, filenames):
        for f in filenames:
            self.mark_added(f)

    @util.propertycache
    def merged(self):
        """files actively merged during a merge

        Any modified file which had modifications on both sides that needed
        merging.

        In this case a new filenode was created and it has two parents.
        """
        return frozenset(self._merged)

    def mark_merged(self, filename):
        if 'merged' in vars(self):
            del self.merged
        self._merged.add(filename)
        self.mark_touched(filename)

    def update_merged(self, filenames):
        for f in filenames:
            self.mark_merged(f)

129 @util.propertycache
129 @util.propertycache
130 def removed(self):
130 def removed(self):
131 """files actively removed by the changeset
131 """files actively removed by the changeset
132
132
133 In case of merge this will only contain the set of files removing "new"
133 In case of merge this will only contain the set of files removing "new"
134 content. For any file absent in the current changeset:
134 content. For any file absent in the current changeset:
135
135
136 a) If the file exists in both parents, it is clearly "actively" removed
136 a) If the file exists in both parents, it is clearly "actively" removed
137 by this changeset.
137 by this changeset.
138
138
139 b) If a file exists in only one parent and in none of the common
139 b) If a file exists in only one parent and in none of the common
140 ancestors, then the file was newly added in one of the merged branches
140 ancestors, then the file was newly added in one of the merged branches
141 and then got "actively" removed.
141 and then got "actively" removed.
142
142
143 c) If a file exists in only one parent and at least one of the common
143 c) If a file exists in only one parent and at least one of the common
144 ancestors using the same filenode, then the file was unchanged on one
144 ancestors using the same filenode, then the file was unchanged on one
145 side and deleted on the other side. The merge "passively" propagated
145 side and deleted on the other side. The merge "passively" propagated
146 that deletion, but didn't "actively" remove the file. In this case the
146 that deletion, but didn't "actively" remove the file. In this case the
147 file is *not* included in the `removed` set.
147 file is *not* included in the `removed` set.
148
148
149 d) If a file exists in only one parent and at least one of the common
149 d) If a file exists in only one parent and at least one of the common
150 ancestors using a different filenode, then the file was changed on one
150 ancestors using a different filenode, then the file was changed on one
151 side and removed on the other side. The merge process "actively"
151 side and removed on the other side. The merge process "actively"
152 decided to drop the new change and delete the file. Unlike in the
152 decided to drop the new change and delete the file. Unlike in the
153 previous case, (c), the file included in the `removed` set.
153 previous case, (c), the file included in the `removed` set.
154
154
155 Summary table for merge:
155 Summary table for merge:
156
156
157 case | exists in parents | exists in gca || removed
157 case | exists in parents | exists in gca || removed
158 (a) | both | * || yes
158 (a) | both | * || yes
159 (b) | one | none || yes
159 (b) | one | none || yes
160 (c) | one | same filenode || no
160 (c) | one | same filenode || no
161 (d) | one | new filenode || yes
161 (d) | one | new filenode || yes
162 """
162 """
163 return frozenset(self._removed)
163 return frozenset(self._removed)
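# Editor's illustration (not part of the module): a minimal classifier for
# the (a)-(d) table above. `p1_node`/`p2_node`/`gca_nodes` are hypothetical
# stand-ins for the filenode of a file absent from the changeset itself, as
# seen in each parent and in the common ancestors (None means absent).
def _is_actively_removed(p1_node, p2_node, gca_nodes):
    if p1_node is not None and p2_node is not None:
        return True  # case (a): present in both parents
    side = p1_node if p1_node is not None else p2_node
    ancestors = [n for n in gca_nodes if n is not None]
    if not ancestors:
        return True  # case (b): added on one branch, then removed
    if any(n == side for n in ancestors):
        return False  # case (c): deletion merely propagated by the merge
    return True  # case (d): changed on one side, removed on the other

assert _is_actively_removed(b'n1', b'n2', [b'n1'])     # (a)
assert _is_actively_removed(b'n1', None, [None])       # (b)
assert not _is_actively_removed(b'n1', None, [b'n1'])  # (c)
assert _is_actively_removed(b'n1', None, [b'n0'])      # (d)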
164
164
165 def mark_removed(self, filename):
165 def mark_removed(self, filename):
166 if 'removed' in vars(self):
166 if 'removed' in vars(self):
167 del self.removed
167 del self.removed
168 self._removed.add(filename)
168 self._removed.add(filename)
169 self.mark_touched(filename)
169 self.mark_touched(filename)
170
170
171 def update_removed(self, filenames):
171 def update_removed(self, filenames):
172 for f in filenames:
172 for f in filenames:
173 self.mark_removed(f)
173 self.mark_removed(f)
174
174
175 @util.propertycache
175 @util.propertycache
176 def salvaged(self):
176 def salvaged(self):
177 """files that might have been deleted by a merge, but still exists.
177 """files that might have been deleted by a merge, but still exists.
178
178
179 During a merge, the manifest merging might select some files for
179 During a merge, the manifest merging might select some files for
180 removal, or for a removed/changed conflict. If at commit time the file
180 removal, or for a removed/changed conflict. If at commit time the file
181 still exists, its removal was "reverted" and the file is "salvaged"
181 still exists, its removal was "reverted" and the file is "salvaged"
182 """
182 """
183 return frozenset(self._salvaged)
183 return frozenset(self._salvaged)
184
184
185 def mark_salvaged(self, filename):
185 def mark_salvaged(self, filename):
186 if "salvaged" in vars(self):
186 if "salvaged" in vars(self):
187 del self.salvaged
187 del self.salvaged
188 self._salvaged.add(filename)
188 self._salvaged.add(filename)
189 self.mark_touched(filename)
189 self.mark_touched(filename)
190
190
191 def update_salvaged(self, filenames):
191 def update_salvaged(self, filenames):
192 for f in filenames:
192 for f in filenames:
193 self.mark_salvaged(f)
193 self.mark_salvaged(f)
194
194
195 @util.propertycache
195 @util.propertycache
196 def touched(self):
196 def touched(self):
197 """files either actively modified, added or removed"""
197 """files either actively modified, added or removed"""
198 return frozenset(self._touched)
198 return frozenset(self._touched)
199
199
200 def mark_touched(self, filename):
200 def mark_touched(self, filename):
201 if 'touched' in vars(self):
201 if 'touched' in vars(self):
202 del self.touched
202 del self.touched
203 self._touched.add(filename)
203 self._touched.add(filename)
204
204
205 def update_touched(self, filenames):
205 def update_touched(self, filenames):
206 for f in filenames:
206 for f in filenames:
207 self.mark_touched(f)
207 self.mark_touched(f)
208
208
209 @util.propertycache
209 @util.propertycache
210 def copied_from_p1(self):
210 def copied_from_p1(self):
211 return self._p1_copies.copy()
211 return self._p1_copies.copy()
212
212
213 def mark_copied_from_p1(self, source, dest):
213 def mark_copied_from_p1(self, source, dest):
214 if 'copied_from_p1' in vars(self):
214 if 'copied_from_p1' in vars(self):
215 del self.copied_from_p1
215 del self.copied_from_p1
216 self._p1_copies[dest] = source
216 self._p1_copies[dest] = source
217
217
218 def update_copies_from_p1(self, copies):
218 def update_copies_from_p1(self, copies):
219 for dest, source in copies.items():
219 for dest, source in copies.items():
220 self.mark_copied_from_p1(source, dest)
220 self.mark_copied_from_p1(source, dest)
221
221
222 @util.propertycache
222 @util.propertycache
223 def copied_from_p2(self):
223 def copied_from_p2(self):
224 return self._p2_copies.copy()
224 return self._p2_copies.copy()
225
225
226 def mark_copied_from_p2(self, source, dest):
226 def mark_copied_from_p2(self, source, dest):
227 if 'copied_from_p2' in vars(self):
227 if 'copied_from_p2' in vars(self):
228 del self.copied_from_p2
228 del self.copied_from_p2
229 self._p2_copies[dest] = source
229 self._p2_copies[dest] = source
230
230
231 def update_copies_from_p2(self, copies):
231 def update_copies_from_p2(self, copies):
232 for dest, source in copies.items():
232 for dest, source in copies.items():
233 self.mark_copied_from_p2(source, dest)
233 self.mark_copied_from_p2(source, dest)
234
234
235
235
236 def compute_all_files_changes(ctx):
236 def compute_all_files_changes(ctx):
237 """compute the files changed by a revision"""
237 """compute the files changed by a revision"""
238 p1 = ctx.p1()
238 p1 = ctx.p1()
239 p2 = ctx.p2()
239 p2 = ctx.p2()
240 if p1.rev() == nullrev and p2.rev() == nullrev:
240 if p1.rev() == nullrev and p2.rev() == nullrev:
241 return _process_root(ctx)
241 return _process_root(ctx)
242 elif p1.rev() != nullrev and p2.rev() == nullrev:
242 elif p1.rev() != nullrev and p2.rev() == nullrev:
243 return _process_linear(p1, ctx)
243 return _process_linear(p1, ctx)
244 elif p1.rev() == nullrev and p2.rev() != nullrev:
244 elif p1.rev() == nullrev and p2.rev() != nullrev:
245 # In the wild, one can encounter changesets where p1 is null but p2 is not
245 # In the wild, one can encounter changesets where p1 is null but p2 is not
246 return _process_linear(p1, ctx, parent=2)
246 return _process_linear(p2, ctx, parent=2)
247 elif p1.rev() == p2.rev():
247 elif p1.rev() == p2.rev():
248 # In the wild, one can encounter such a "non-merge"
248 # In the wild, one can encounter such a "non-merge"
249 return _process_linear(p1, ctx)
249 return _process_linear(p1, ctx)
250 else:
250 else:
251 return _process_merge(p1, p2, ctx)
251 return _process_merge(p1, p2, ctx)
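# Editor's illustration: the dispatch above reduces to this shape table for
# the two parent revisions (nullrev marks a missing parent). Self-contained
# sketch with a stand-in nullrev value; not part of the module.
_NULLREV = -1

def _dispatch_kind(p1_rev, p2_rev):
    if p1_rev == _NULLREV and p2_rev == _NULLREV:
        return 'root'
    if p2_rev == _NULLREV or p1_rev == _NULLREV or p1_rev == p2_rev:
        return 'linear'
    return 'merge'

assert _dispatch_kind(_NULLREV, _NULLREV) == 'root'
assert _dispatch_kind(5, _NULLREV) == 'linear'
assert _dispatch_kind(_NULLREV, 5) == 'linear'  # rare "p1 null, p2 set" case
assert _dispatch_kind(5, 5) == 'linear'         # degenerate "non-merge"
assert _dispatch_kind(5, 6) == 'merge'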
252
252
253
253
254 def _process_root(ctx):
254 def _process_root(ctx):
255 """compute the appropriate changed files for a changeset with no parents"""
255 """compute the appropriate changed files for a changeset with no parents"""
256 # Simple, there was nothing before it, so everything is added.
256 # Simple, there was nothing before it, so everything is added.
257 md = ChangingFiles()
257 md = ChangingFiles()
258 manifest = ctx.manifest()
258 manifest = ctx.manifest()
259 for filename in manifest:
259 for filename in manifest:
260 md.mark_added(filename)
260 md.mark_added(filename)
261 return md
261 return md
262
262
263
263
264 def _process_linear(parent_ctx, children_ctx, parent=1):
264 def _process_linear(parent_ctx, children_ctx, parent=1):
265 """compute the appropriate changed files for a changeset with a single parent"""
265 """compute the appropriate changed files for a changeset with a single parent"""
266 md = ChangingFiles()
266 md = ChangingFiles()
267 parent_manifest = parent_ctx.manifest()
267 parent_manifest = parent_ctx.manifest()
268 children_manifest = children_ctx.manifest()
268 children_manifest = children_ctx.manifest()
269
269
270 copies_candidate = []
270 copies_candidate = []
271
271
272 for filename, d in parent_manifest.diff(children_manifest).items():
272 for filename, d in parent_manifest.diff(children_manifest).items():
273 if d[1][0] is None:
273 if d[1][0] is None:
274 # no filenode for the "new" value, file is absent
274 # no filenode for the "new" value, file is absent
275 md.mark_removed(filename)
275 md.mark_removed(filename)
276 else:
276 else:
277 copies_candidate.append(filename)
277 copies_candidate.append(filename)
278 if d[0][0] is None:
278 if d[0][0] is None:
279 # no filenode for the "old" value, file was absent
279 # no filenode for the "old" value, file was absent
280 md.mark_added(filename)
280 md.mark_added(filename)
281 else:
281 else:
282 # filenode for both "old" and "new"
282 # filenode for both "old" and "new"
283 md.mark_touched(filename)
283 md.mark_touched(filename)
284
284
285 if parent == 1:
285 if parent == 1:
286 copied = md.mark_copied_from_p1
286 copied = md.mark_copied_from_p1
287 elif parent == 2:
287 elif parent == 2:
288 copied = md.mark_copied_from_p2
288 copied = md.mark_copied_from_p2
289 else:
289 else:
290 assert False, "bad parent value %d" % parent
290 assert False, "bad parent value %d" % parent
291
291
292 for filename in copies_candidate:
292 for filename in copies_candidate:
293 copy_info = children_ctx[filename].renamed()
293 copy_info = children_ctx[filename].renamed()
294 if copy_info:
294 if copy_info:
295 source, srcnode = copy_info
295 source, srcnode = copy_info
296 copied(source, filename)
296 copied(source, filename)
297
297
298 return md
298 return md
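# Editor's note: the `manifest.diff()` entries consumed above have the shape
# {filename: ((old_node, old_flag), (new_node, new_flag))}, where a None
# node means the file is absent on that side. A hedged stand-in showing the
# classification performed by the loop above (hypothetical data only):
_demo_diff = {
    b'gone':   ((b'n1', b''), (None, b'')),   # absent after: removed
    b'new':    ((None, b''), (b'n2', b'')),   # absent before: added
    b'edited': ((b'n1', b''), (b'n3', b'')),  # present both sides: touched
}
_kinds = {}
for _f, _d in _demo_diff.items():
    if _d[1][0] is None:
        _kinds[_f] = 'removed'
    elif _d[0][0] is None:
        _kinds[_f] = 'added'
    else:
        _kinds[_f] = 'touched'
assert _kinds == {b'gone': 'removed', b'new': 'added', b'edited': 'touched'}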
299
299
300
300
301 def _process_merge(p1_ctx, p2_ctx, ctx):
301 def _process_merge(p1_ctx, p2_ctx, ctx):
302 """compute the appropriate changed files for a changeset with two parents
302 """compute the appropriate changed files for a changeset with two parents
303
303
304 This is a more advanced case. The information we need to record is summarised
304 This is a more advanced case. The information we need to record is summarised
305 in the following table:
305 in the following table:
306
306
307 β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”
307 β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”
308 β”‚ diff β•² diff β”‚ ΓΈ β”‚ (Some, None) β”‚ (None, Some) β”‚ (Some, Some) β”‚
308 β”‚ diff β•² diff β”‚ ΓΈ β”‚ (Some, None) β”‚ (None, Some) β”‚ (Some, Some) β”‚
309 β”‚ p2 β•² p1 β”‚ β”‚ β”‚ β”‚ β”‚
309 β”‚ p2 β•² p1 β”‚ β”‚ β”‚ β”‚ β”‚
310 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
310 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
311 β”‚ β”‚ β”‚πŸ„± No Changes β”‚πŸ„³ No Changes β”‚ β”‚
311 β”‚ β”‚ β”‚πŸ„± No Changes β”‚πŸ„³ No Changes β”‚ β”‚
312 β”‚ ΓΈ β”‚πŸ„° No Changes β”‚ OR β”‚ OR β”‚πŸ„΅ No Changes β”‚
312 β”‚ ΓΈ β”‚πŸ„° No Changes β”‚ OR β”‚ OR β”‚πŸ„΅ No Changes β”‚
313 β”‚ β”‚ β”‚πŸ„² Deleted[1] β”‚πŸ„΄ Salvaged[2]β”‚ [3] β”‚
313 β”‚ β”‚ β”‚πŸ„² Deleted[1] β”‚πŸ„΄ Salvaged[2]β”‚ [3] β”‚
314 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
314 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
315 β”‚ β”‚πŸ„Ά No Changes β”‚ β”‚ β”‚ β”‚
315 β”‚ β”‚πŸ„Ά No Changes β”‚ β”‚ β”‚ β”‚
316 β”‚ (Some, None) β”‚ OR β”‚πŸ„» Deleted β”‚ ΓΈ β”‚ ΓΈ β”‚
316 β”‚ (Some, None) β”‚ OR β”‚πŸ„» Deleted β”‚ ΓΈ β”‚ ΓΈ β”‚
317 β”‚ β”‚πŸ„· Deleted[1] β”‚ β”‚ β”‚ β”‚
317 β”‚ β”‚πŸ„· Deleted[1] β”‚ β”‚ β”‚ β”‚
318 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
318 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
319 β”‚ β”‚πŸ„Έ No Changes β”‚ β”‚ β”‚ πŸ„½ Touched β”‚
319 β”‚ β”‚πŸ„Έ No Changes β”‚ β”‚ β”‚ πŸ„½ Touched β”‚
320 β”‚ (None, Some) β”‚ OR β”‚ ΓΈ β”‚πŸ„Ό Added β”‚OR πŸ…€ Salvaged β”‚
320 β”‚ (None, Some) β”‚ OR β”‚ ΓΈ β”‚πŸ„Ό Added β”‚OR πŸ…€ Salvaged β”‚
321 β”‚ β”‚πŸ„Ή Salvaged[2]β”‚ β”‚ (copied?) β”‚ (copied?) β”‚
321 β”‚ β”‚πŸ„Ή Salvaged[2]β”‚ β”‚ (copied?) β”‚ (copied?) β”‚
322 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
322 β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€
323 β”‚ β”‚ β”‚ β”‚ πŸ„Ύ Touched β”‚ πŸ„Ώ Merged β”‚
323 β”‚ β”‚ β”‚ β”‚ πŸ„Ύ Touched β”‚ πŸ„Ώ Merged β”‚
324 β”‚ (Some, Some) β”‚πŸ„Ί No Changes β”‚ ΓΈ β”‚OR πŸ… Salvaged β”‚OR πŸ…‚ Touched β”‚
324 β”‚ (Some, Some) β”‚πŸ„Ί No Changes β”‚ ΓΈ β”‚OR πŸ… Salvaged β”‚OR πŸ…‚ Touched β”‚
325 β”‚ β”‚ [3] β”‚ β”‚ (copied?) β”‚ (copied?) β”‚
325 β”‚ β”‚ [3] β”‚ β”‚ (copied?) β”‚ (copied?) β”‚
326 β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜
326 β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜
327
327
328 Special case [1]:
328 Special case [1]:
329
329
330 The situation is:
330 The situation is:
331 - parent-A: file exists,
331 - parent-A: file exists,
332 - parent-B: no file,
332 - parent-B: no file,
333 - working-copy: no file.
333 - working-copy: no file.
334
334
335 Detecting a "deletion" will depend on the presence of actual change on
335 Detecting a "deletion" will depend on the presence of actual change on
336 the "parent-A" branch:
336 the "parent-A" branch:
337
337
338 Subcase πŸ„± or πŸ„Ά : if the state of the file in "parent-A" is unchanged
338 Subcase πŸ„± or πŸ„Ά : if the state of the file in "parent-A" is unchanged
339 compared to the merge ancestors, then parent-A branch left the file
339 compared to the merge ancestors, then parent-A branch left the file
340 untouched while parent-B deleted it. We simply apply the change from the
340 untouched while parent-B deleted it. We simply apply the change from the
341 "parent-B" branch: the file was automatically dropped.
341 "parent-B" branch: the file was automatically dropped.
342 The result is:
342 The result is:
343 - file is not recorded as touched by the merge.
343 - file is not recorded as touched by the merge.
344
344
345 Subcase πŸ„² or πŸ„· : otherwise, the changes from the parent-A branch were explicitly dropped and
345 Subcase πŸ„² or πŸ„· : otherwise, the changes from the parent-A branch were explicitly dropped and
346 the file was "deleted again". From a user perspective, the message
346 the file was "deleted again". From a user perspective, the message
347 about "locally changed" while "remotely deleted" (or the other way
347 about "locally changed" while "remotely deleted" (or the other way
348 around) was issued and the user chose to delete the file.
348 around) was issued and the user chose to delete the file.
349 The result:
349 The result:
350 - file is recorded as touched by the merge.
350 - file is recorded as touched by the merge.
351
351
352
352
353 Special case [2]:
353 Special case [2]:
354
354
355 The situation is:
355 The situation is:
356 - parent-A: no file,
356 - parent-A: no file,
357 - parent-B: file,
357 - parent-B: file,
358 - working-copy: file (same content as parent-B).
358 - working-copy: file (same content as parent-B).
359
359
360 There are three subcases depending on the ancestors contents:
360 There are three subcases depending on the ancestors contents:
361
361
362 - A) the file is missing in all ancestors,
362 - A) the file is missing in all ancestors,
363 - B) at least one ancestor has the file, with a filenode different from parent-B's,
363 - B) at least one ancestor has the file, with a filenode different from parent-B's,
364 - C) all ancestors use the same filenode as parent-B,
364 - C) all ancestors use the same filenode as parent-B,
365
365
366 Subcase (A) is the simplest: nothing happened on the parent-A side while
366 Subcase (A) is the simplest: nothing happened on the parent-A side while
367 parent-B added the file.
367 parent-B added the file.
368
368
369 The result:
369 The result:
370 - the file is not marked as touched by the merge.
370 - the file is not marked as touched by the merge.
371
371
372 Subcase (B) is the counterpart of "Special case [1]": the file was
372 Subcase (B) is the counterpart of "Special case [1]": the file was
373 modified on parent-B side, while parent-A side deleted it. However this
373 modified on parent-B side, while parent-A side deleted it. However this
374 time, the conflict was solved by keeping the file (and its
374 time, the conflict was solved by keeping the file (and its
375 modification). We consider the file as "salvaged".
375 modification). We consider the file as "salvaged".
376
376
377 The result:
377 The result:
378 - the file is marked as "salvaged" by the merge.
378 - the file is marked as "salvaged" by the merge.
379
379
380 Subcase (C) is a subtle variation of the case above. In this case, the
380 Subcase (C) is a subtle variation of the case above. In this case, the
381 file is unchanged on the parent-B side and actively removed on the
381 file is unchanged on the parent-B side and actively removed on the
382 parent-A side. So the merge machinery correctly decides it should be
382 parent-A side. So the merge machinery correctly decides it should be
383 removed. However, the file was explicitly restored to its parent-B
383 removed. However, the file was explicitly restored to its parent-B
384 content before the merge was committed. The file is marked
384 content before the merge was committed. The file is marked
385 as salvaged too. From the merge result perspective, this is similar to
385 as salvaged too. From the merge result perspective, this is similar to
386 Subcase (B); however, from the merge resolution perspective they differ,
386 Subcase (B); however, from the merge resolution perspective they differ,
387 since in (C) there was a conflict with no obvious resolution for the
387 since in (C) there was a conflict with no obvious resolution for the
388 merge (which got reverted).
388 merge (which got reverted).
389
389
390 Special case [3]:
390 Special case [3]:
391
391
392 The situation is:
392 The situation is:
393 - parent-A: file,
393 - parent-A: file,
394 - parent-B: file (different filenode than parent-A),
394 - parent-B: file (different filenode than parent-A),
395 - working-copy: file (same filenode as parent-B).
395 - working-copy: file (same filenode as parent-B).
396
396
397 This case is in theory much simpler: for this to happen, the
397 This case is in theory much simpler: for this to happen, the
398 filenode in parent-B must purely replace the one in parent-A (either as a
398 filenode in parent-B must purely replace the one in parent-A (either as a
399 descendant, or as a fully new file history). So the merge
399 descendant, or as a fully new file history). So the merge
400 introduces no changes, and the file is not affected by the merge...
400 introduces no changes, and the file is not affected by the merge...
401
401
402 However, in the wild it is possible to find commits where the above is not
402 However, in the wild it is possible to find commits where the above is not
403 true. For example, some repositories have commits where the *new* node is an
403 true. For example, some repositories have commits where the *new* node is an
404 ancestor of the node in parent-A, or where parent-A and parent-B are two
404 ancestor of the node in parent-A, or where parent-A and parent-B are two
405 branches of the same file history, yet no merge filenode was created
405 branches of the same file history, yet no merge filenode was created
406 (while the "merge" should have led to a "modification").
406 (while the "merge" should have led to a "modification").
407
407
408 Detecting such cases (and not recording the file as modified) would be a
408 Detecting such cases (and not recording the file as modified) would be a
409 nice bonus. However, we do not do any of this yet.
409 nice bonus. However, we do not do any of this yet.
410 """
410 """
411
411
412 repo = ctx.repo()
412 repo = ctx.repo()
413 md = ChangingFiles()
413 md = ChangingFiles()
414
414
415 m = ctx.manifest()
415 m = ctx.manifest()
416 p1m = p1_ctx.manifest()
416 p1m = p1_ctx.manifest()
417 p2m = p2_ctx.manifest()
417 p2m = p2_ctx.manifest()
418 diff_p1 = p1m.diff(m)
418 diff_p1 = p1m.diff(m)
419 diff_p2 = p2m.diff(m)
419 diff_p2 = p2m.diff(m)
420
420
421 cahs = ctx.repo().changelog.commonancestorsheads(
421 cahs = ctx.repo().changelog.commonancestorsheads(
422 p1_ctx.node(), p2_ctx.node()
422 p1_ctx.node(), p2_ctx.node()
423 )
423 )
424 if not cahs:
424 if not cahs:
425 cahs = [nullrev]
425 cahs = [nullrev]
426 mas = [ctx.repo()[r].manifest() for r in cahs]
426 mas = [ctx.repo()[r].manifest() for r in cahs]
427
427
428 copy_candidates = []
428 copy_candidates = []
429
429
430 # Dealing with case πŸ„° happens automatically. Since there is no entry in
430 # Dealing with case πŸ„° happens automatically. Since there is no entry in
431 # either d1 or d2, we will never iterate over it.
431 # either d1 or d2, we will never iterate over it.
432
432
433 # Iteration over d1 content will deal with all cases except those in the
433 # Iteration over d1 content will deal with all cases except those in the
434 # first column of the table.
434 # first column of the table.
435 for filename, d1 in diff_p1.items():
435 for filename, d1 in diff_p1.items():
436 d2 = diff_p2.pop(filename, None)
436 d2 = diff_p2.pop(filename, None)
437
437
438 if d2 is None:
438 if d2 is None:
439 # this deals with the first line of the table.
439 # this deals with the first line of the table.
440 _process_other_unchanged(md, mas, filename, d1)
440 _process_other_unchanged(md, mas, filename, d1)
441 else:
441 else:
442 if d1[0][0] is None and d2[0][0] is None:
442 if d1[0][0] is None and d2[0][0] is None:
443 # case πŸ„Ό β€” the file is absent from both parents; it was added.
443 # case πŸ„Ό β€” the file is absent from both parents; it was added.
444 md.mark_added(filename)
444 md.mark_added(filename)
445 copy_candidates.append(filename)
445 copy_candidates.append(filename)
446 elif d1[1][0] is None and d2[1][0] is None:
446 elif d1[1][0] is None and d2[1][0] is None:
447 # case πŸ„» β€” both deleted the file.
447 # case πŸ„» β€” both deleted the file.
448 md.mark_removed(filename)
448 md.mark_removed(filename)
449 elif d1[1][0] is not None and d2[1][0] is not None:
449 elif d1[1][0] is not None and d2[1][0] is not None:
450 if d1[0][0] is None or d2[0][0] is None:
450 if d1[0][0] is None or d2[0][0] is None:
451 if any(_find(ma, filename) is not None for ma in mas):
451 if any(_find(ma, filename) is not None for ma in mas):
452 # case πŸ…€ or πŸ…
452 # case πŸ…€ or πŸ…
453 md.mark_salvaged(filename)
453 md.mark_salvaged(filename)
454 else:
454 else:
455 # case πŸ„½ πŸ„Ύ : touched
455 # case πŸ„½ πŸ„Ύ : touched
456 md.mark_touched(filename)
456 md.mark_touched(filename)
457 else:
457 else:
458 fctx = repo.filectx(filename, fileid=d1[1][0])
458 fctx = repo.filectx(filename, fileid=d1[1][0])
459 if fctx.p2().rev() == nullrev:
459 if fctx.p2().rev() == nullrev:
460 # case πŸ…‚
460 # case πŸ…‚
461 # let's assume we can trust the file history. If the
461 # let's assume we can trust the file history. If the
462 # filenode is not a merge, the file was not merged.
462 # filenode is not a merge, the file was not merged.
463 md.mark_touched(filename)
463 md.mark_touched(filename)
464 else:
464 else:
465 # case πŸ„Ώ
465 # case πŸ„Ώ
466 md.mark_merged(filename)
466 md.mark_merged(filename)
467 copy_candidates.append(filename)
467 copy_candidates.append(filename)
468 else:
468 else:
469 # Impossible case: the post-merge file status cannot be None on
469 # Impossible case: the post-merge file status cannot be None on
470 # one side and Some on the other side.
470 # one side and Some on the other side.
471 assert False, "unreachable"
471 assert False, "unreachable"
472
472
473 # Iteration over the remaining d2 content deals with the first column of the
473 # Iteration over the remaining d2 content deals with the first column of the
474 # table.
474 # table.
475 for filename, d2 in diff_p2.items():
475 for filename, d2 in diff_p2.items():
476 _process_other_unchanged(md, mas, filename, d2)
476 _process_other_unchanged(md, mas, filename, d2)
477
477
478 for filename in copy_candidates:
478 for filename in copy_candidates:
479 copy_info = ctx[filename].renamed()
479 copy_info = ctx[filename].renamed()
480 if copy_info:
480 if copy_info:
481 source, srcnode = copy_info
481 source, srcnode = copy_info
482 if source in p1_ctx and p1_ctx[source].filenode() == srcnode:
482 if source in p1_ctx and p1_ctx[source].filenode() == srcnode:
483 md.mark_copied_from_p1(source, filename)
483 md.mark_copied_from_p1(source, filename)
484 elif source in p2_ctx and p2_ctx[source].filenode() == srcnode:
484 elif source in p2_ctx and p2_ctx[source].filenode() == srcnode:
485 md.mark_copied_from_p2(source, filename)
485 md.mark_copied_from_p2(source, filename)
486 return md
486 return md
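# Editor's illustration of the pairing trick used above: iterate diff_p1
# while popping matching entries out of diff_p2, then sweep the leftovers,
# so every changed file is visited exactly once (plain-dict stand-in):
_d1 = {b'a': 1, b'b': 2}
_d2 = {b'b': 20, b'c': 30}
_seen = []
for _f in list(_d1):
    _d2.pop(_f, None)  # consume the matching p2 entry, if any
    _seen.append(_f)
_seen.extend(_d2)  # remaining files changed only relative to p2
assert sorted(_seen) == [b'a', b'b', b'c']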
487
487
488
488
489 def _find(manifest, filename):
489 def _find(manifest, filename):
490 """return the associate filenode or None"""
490 """return the associate filenode or None"""
491 if filename not in manifest:
491 if filename not in manifest:
492 return None
492 return None
493 return manifest.find(filename)[0]
493 return manifest.find(filename)[0]
494
494
495
495
496 def _process_other_unchanged(md, mas, filename, diff):
496 def _process_other_unchanged(md, mas, filename, diff):
497 source_node = diff[0][0]
497 source_node = diff[0][0]
498 target_node = diff[1][0]
498 target_node = diff[1][0]
499
499
500 if source_node is not None and target_node is None:
500 if source_node is not None and target_node is None:
501 if any(_find(ma, filename) != source_node for ma in mas):
501 if any(_find(ma, filename) != source_node for ma in mas):
502 # case πŸ„² or πŸ„·
502 # case πŸ„² or πŸ„·
503 md.mark_removed(filename)
503 md.mark_removed(filename)
504 # else, we have case πŸ„± or πŸ„Ά : no change needs to be recorded
504 # else, we have case πŸ„± or πŸ„Ά : no change needs to be recorded
505 elif source_node is None and target_node is not None:
505 elif source_node is None and target_node is not None:
506 if any(_find(ma, filename) is not None for ma in mas):
506 if any(_find(ma, filename) is not None for ma in mas):
507 # case πŸ„΄ or πŸ„Ή
507 # case πŸ„΄ or πŸ„Ή
508 md.mark_salvaged(filename)
508 md.mark_salvaged(filename)
509 # else, we have case πŸ„³ or πŸ„Έ : simple merge without intervention
509 # else, we have case πŸ„³ or πŸ„Έ : simple merge without intervention
510 elif source_node is not None and target_node is not None:
510 elif source_node is not None and target_node is not None:
511 # case πŸ„΅ or πŸ„Ί : simple merge without intervention
511 # case πŸ„΅ or πŸ„Ί : simple merge without intervention
512 #
512 #
513 # In the buggy case where source_node is not an ancestor of target_node,
513 # In the buggy case where source_node is not an ancestor of target_node,
514 # a new filenode should have been created, recording this as
514 # a new filenode should have been created, recording this as
515 # "modified". We do not deal with that yet.
515 # "modified". We do not deal with that yet.
516 pass
516 pass
517 else:
517 else:
518 # An impossible case: the diff algorithm should not return an entry if the
518 # An impossible case: the diff algorithm should not return an entry if the
519 # file is missing on both sides.
519 # file is missing on both sides.
520 assert False, "unreachable"
520 assert False, "unreachable"
521
521
522
522
523 def _missing_from_all_ancestors(mas, filename):
523 def _missing_from_all_ancestors(mas, filename):
524 return all(_find(ma, filename) is None for ma in mas)
524 return all(_find(ma, filename) is None for ma in mas)
525
525
526
526
527 def computechangesetfilesadded(ctx):
527 def computechangesetfilesadded(ctx):
528 """return the list of files added in a changeset"""
528 """return the list of files added in a changeset"""
529 added = []
529 added = []
530 for f in ctx.files():
530 for f in ctx.files():
531 if not any(f in p for p in ctx.parents()):
531 if not any(f in p for p in ctx.parents()):
532 added.append(f)
532 added.append(f)
533 return added
533 return added
534
534
535
535
536 def get_removal_filter(ctx, x=None):
536 def get_removal_filter(ctx, x=None):
537 """return a function to detect files "wrongly" detected as `removed`
537 """return a function to detect files "wrongly" detected as `removed`
538
538
539 When a file is removed relative to p1 in a merge, this
539 When a file is removed relative to p1 in a merge, this
540 function determines whether the absence is due to a
540 function determines whether the absence is due to a
541 deletion from a parent, or whether the merge commit
541 deletion from a parent, or whether the merge commit
542 itself deletes the file. We decide this by doing a
542 itself deletes the file. We decide this by doing a
543 simplified three way merge of the manifest entry for
543 simplified three way merge of the manifest entry for
544 the file. There are two ways we decide the merge
544 the file. There are two ways we decide the merge
545 itself didn't delete a file:
545 itself didn't delete a file:
546 - neither parent (nor the merge) contain the file
546 - neither parent (nor the merge) contain the file
547 - exactly one parent contains the file, and that
547 - exactly one parent contains the file, and that
548 parent has the same filelog entry as the merge
548 parent has the same filelog entry as the merge
549 ancestor (or all of them if there are two). In other
549 ancestor (or all of them if there are two). In other
550 words, that parent left the file unchanged while the
550 words, that parent left the file unchanged while the
551 other one deleted it.
551 other one deleted it.
552 One way to think about this is that deleting a file is
552 One way to think about this is that deleting a file is
553 similar to emptying it, so the list of changed files
553 similar to emptying it, so the list of changed files
554 should be similar either way. The computation
554 should be similar either way. The computation
555 described above is not done directly in _filecommit
555 described above is not done directly in _filecommit
556 when creating the list of changed files, however
556 when creating the list of changed files, however
557 it does something very similar by comparing filelog
557 it does something very similar by comparing filelog
558 nodes.
558 nodes.
559 """
559 """
560
560
561 if x is not None:
561 if x is not None:
562 p1, p2, m1, m2 = x
562 p1, p2, m1, m2 = x
563 else:
563 else:
564 p1 = ctx.p1()
564 p1 = ctx.p1()
565 p2 = ctx.p2()
565 p2 = ctx.p2()
566 m1 = p1.manifest()
566 m1 = p1.manifest()
567 m2 = p2.manifest()
567 m2 = p2.manifest()
568
568
569 @util.cachefunc
569 @util.cachefunc
570 def mas():
570 def mas():
571 p1n = p1.node()
571 p1n = p1.node()
572 p2n = p2.node()
572 p2n = p2.node()
573 cahs = ctx.repo().changelog.commonancestorsheads(p1n, p2n)
573 cahs = ctx.repo().changelog.commonancestorsheads(p1n, p2n)
574 if not cahs:
574 if not cahs:
575 cahs = [nullrev]
575 cahs = [nullrev]
576 return [ctx.repo()[r].manifest() for r in cahs]
576 return [ctx.repo()[r].manifest() for r in cahs]
577
577
578 def deletionfromparent(f):
578 def deletionfromparent(f):
579 if f in m1:
579 if f in m1:
580 return f not in m2 and all(
580 return f not in m2 and all(
581 f in ma and ma.find(f) == m1.find(f) for ma in mas()
581 f in ma and ma.find(f) == m1.find(f) for ma in mas()
582 )
582 )
583 elif f in m2:
583 elif f in m2:
584 return all(f in ma and ma.find(f) == m2.find(f) for ma in mas())
584 return all(f in ma and ma.find(f) == m2.find(f) for ma in mas())
585 else:
585 else:
586 return True
586 return True
587
587
588 return deletionfromparent
588 return deletionfromparent
589
589
590
590
591 def computechangesetfilesremoved(ctx):
591 def computechangesetfilesremoved(ctx):
592 """return the list of files removed in a changeset"""
592 """return the list of files removed in a changeset"""
593 removed = []
593 removed = []
594 for f in ctx.files():
594 for f in ctx.files():
595 if f not in ctx:
595 if f not in ctx:
596 removed.append(f)
596 removed.append(f)
597 if removed:
597 if removed:
598 rf = get_removal_filter(ctx)
598 rf = get_removal_filter(ctx)
599 removed = [r for r in removed if not rf(r)]
599 removed = [r for r in removed if not rf(r)]
600 return removed
600 return removed
601
601
602
602
603 def computechangesetfilesmerged(ctx):
603 def computechangesetfilesmerged(ctx):
604 """return the list of files merged in a changeset"""
604 """return the list of files merged in a changeset"""
605 merged = []
605 merged = []
606 if len(ctx.parents()) < 2:
606 if len(ctx.parents()) < 2:
607 return merged
607 return merged
608 for f in ctx.files():
608 for f in ctx.files():
609 if f in ctx:
609 if f in ctx:
610 fctx = ctx[f]
610 fctx = ctx[f]
611 parents = fctx._filelog.parents(fctx._filenode)
611 parents = fctx._filelog.parents(fctx._filenode)
612 if parents[1] != ctx.repo().nullid:
612 if parents[1] != ctx.repo().nullid:
613 merged.append(f)
613 merged.append(f)
614 return merged
614 return merged
615
615
616
616
617 def computechangesetcopies(ctx):
617 def computechangesetcopies(ctx):
618 """return the copies data for a changeset
618 """return the copies data for a changeset
619
619
620 The copies data are returned as a pair of dictionaries (p1copies, p2copies).
620 The copies data are returned as a pair of dictionaries (p1copies, p2copies).
621
621
622 Each dictionary is of the form: `{newname: oldname}`
622 Each dictionary is of the form: `{newname: oldname}`
623 """
623 """
624 p1copies = {}
624 p1copies = {}
625 p2copies = {}
625 p2copies = {}
626 p1 = ctx.p1()
626 p1 = ctx.p1()
627 p2 = ctx.p2()
627 p2 = ctx.p2()
628 narrowmatch = ctx._repo.narrowmatch()
628 narrowmatch = ctx._repo.narrowmatch()
629 for dst in ctx.files():
629 for dst in ctx.files():
630 if not narrowmatch(dst) or dst not in ctx:
630 if not narrowmatch(dst) or dst not in ctx:
631 continue
631 continue
632 copied = ctx[dst].renamed()
632 copied = ctx[dst].renamed()
633 if not copied:
633 if not copied:
634 continue
634 continue
635 src, srcnode = copied
635 src, srcnode = copied
636 if src in p1 and p1[src].filenode() == srcnode:
636 if src in p1 and p1[src].filenode() == srcnode:
637 p1copies[dst] = src
637 p1copies[dst] = src
638 elif src in p2 and p2[src].filenode() == srcnode:
638 elif src in p2 and p2[src].filenode() == srcnode:
639 p2copies[dst] = src
639 p2copies[dst] = src
640 return p1copies, p2copies
640 return p1copies, p2copies
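# Editor's sketch of the attribution rule above: a rename is credited to the
# parent that actually contains the copy source at the recorded filenode,
# and stale metadata is silently dropped. Hypothetical dicts stand in for
# the parent manifests:
def _attribute_copy(src, srcnode, p1_nodes, p2_nodes):
    if p1_nodes.get(src) == srcnode:
        return 'p1'
    if p2_nodes.get(src) == srcnode:
        return 'p2'
    return None  # source missing or rewritten in both parents

assert _attribute_copy(b'a', b'n1', {b'a': b'n1'}, {}) == 'p1'
assert _attribute_copy(b'a', b'n1', {}, {b'a': b'n1'}) == 'p2'
assert _attribute_copy(b'a', b'n1', {b'a': b'n9'}, {}) is None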
641
641
642
642
643 def encodecopies(files, copies):
643 def encodecopies(files, copies):
644 items = []
644 items = []
645 for i, dst in enumerate(files):
645 for i, dst in enumerate(files):
646 if dst in copies:
646 if dst in copies:
647 items.append(b'%d\0%s' % (i, copies[dst]))
647 items.append(b'%d\0%s' % (i, copies[dst]))
648 if len(items) != len(copies):
648 if len(items) != len(copies):
649 raise error.ProgrammingError(
649 raise error.ProgrammingError(
650 b'some copy targets missing from file list'
650 b'some copy targets missing from file list'
651 )
651 )
652 return b"\n".join(items)
652 return b"\n".join(items)
653
653
654
654
655 def decodecopies(files, data):
655 def decodecopies(files, data):
656 try:
656 try:
657 copies = {}
657 copies = {}
658 if not data:
658 if not data:
659 return copies
659 return copies
660 for l in data.split(b'\n'):
660 for l in data.split(b'\n'):
661 strindex, src = l.split(b'\0')
661 strindex, src = l.split(b'\0')
662 i = int(strindex)
662 i = int(strindex)
663 dst = files[i]
663 dst = files[i]
664 copies[dst] = src
664 copies[dst] = src
665 return copies
665 return copies
666 except (ValueError, IndexError):
666 except (ValueError, IndexError):
667 # Perhaps someone had chosen the same key name (e.g. "p1copies") and
667 # Perhaps someone had chosen the same key name (e.g. "p1copies") and
668 # used different syntax for the value.
668 # used different syntax for the value.
669 return None
669 return None
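# Editor's round-trip sketch for the two helpers above: the encoded form is
# one "<file-index>\0<copy-source>" record per line, indices pointing into
# the changeset's file list. Runs against the functions as defined here:
_files = [b'README', b'doc/guide.txt']
_copies = {b'doc/guide.txt': b'guide.txt'}
assert encodecopies(_files, _copies) == b'1\x00guide.txt'
assert decodecopies(_files, b'1\x00guide.txt') == _copies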
670
670
671
671
672 def encodefileindices(files, subset):
672 def encodefileindices(files, subset):
673 subset = set(subset)
673 subset = set(subset)
674 indices = []
674 indices = []
675 for i, f in enumerate(files):
675 for i, f in enumerate(files):
676 if f in subset:
676 if f in subset:
677 indices.append(b'%d' % i)
677 indices.append(b'%d' % i)
678 return b'\n'.join(indices)
678 return b'\n'.join(indices)
679
679
680
680
681 def decodefileindices(files, data):
681 def decodefileindices(files, data):
682 try:
682 try:
683 subset = []
683 subset = []
684 if not data:
684 if not data:
685 return subset
685 return subset
686 for strindex in data.split(b'\n'):
686 for strindex in data.split(b'\n'):
687 i = int(strindex)
687 i = int(strindex)
688 if i < 0 or i >= len(files):
688 if i < 0 or i >= len(files):
689 return None
689 return None
690 subset.append(files[i])
690 subset.append(files[i])
691 return subset
691 return subset
692 except (ValueError, IndexError):
692 except (ValueError, IndexError):
693 # Perhaps someone had chosen the same key name (e.g. "added") and
693 # Perhaps someone had chosen the same key name (e.g. "added") and
694 # used different syntax for the value.
694 # used different syntax for the value.
695 return None
695 return None
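# Editor's round-trip sketch: file subsets are stored as newline-separated
# decimal indices into the file list, and decoding restores the names:
_files = [b'a', b'b', b'c']
assert encodefileindices(_files, {b'a', b'c'}) == b'0\n2'
assert decodefileindices(_files, b'0\n2') == [b'a', b'c']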
696
696
697
697
698 # see mercurial/helptext/internals/revlogs.txt for details about the format
698 # see mercurial/helptext/internals/revlogs.txt for details about the format
699
699
700 ACTION_MASK = int("111" "00", 2)
700 ACTION_MASK = int("111" "00", 2)
701 # note: an untouched file used as a copy source will show as `000` for this mask.
701 # note: an untouched file used as a copy source will show as `000` for this mask.
702 ADDED_FLAG = int("001" "00", 2)
702 ADDED_FLAG = int("001" "00", 2)
703 MERGED_FLAG = int("010" "00", 2)
703 MERGED_FLAG = int("010" "00", 2)
704 REMOVED_FLAG = int("011" "00", 2)
704 REMOVED_FLAG = int("011" "00", 2)
705 SALVAGED_FLAG = int("100" "00", 2)
705 SALVAGED_FLAG = int("100" "00", 2)
706 TOUCHED_FLAG = int("101" "00", 2)
706 TOUCHED_FLAG = int("101" "00", 2)
707
707
708 COPIED_MASK = int("11", 2)
708 COPIED_MASK = int("11", 2)
709 COPIED_FROM_P1_FLAG = int("10", 2)
709 COPIED_FROM_P1_FLAG = int("10", 2)
710 COPIED_FROM_P2_FLAG = int("11", 2)
710 COPIED_FROM_P2_FLAG = int("11", 2)
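# Editor's note: each entry flag packs an action in bits 2-4 and a copy
# marker in bits 0-1, so the two fields compose with a plain bitwise OR:
assert ADDED_FLAG | COPIED_FROM_P1_FLAG == 0b00110
assert (ADDED_FLAG | COPIED_FROM_P1_FLAG) & ACTION_MASK == ADDED_FLAG
assert (ADDED_FLAG | COPIED_FROM_P1_FLAG) & COPIED_MASK == COPIED_FROM_P1_FLAG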
711
711
712 # structure is <flag><filename-end><copy-source>
712 # structure is <flag><filename-end><copy-source>
713 INDEX_HEADER = struct.Struct(">L")
713 INDEX_HEADER = struct.Struct(">L")
714 INDEX_ENTRY = struct.Struct(">bLL")
714 INDEX_ENTRY = struct.Struct(">bLL")
715
715
716
716
717 def encode_files_sidedata(files):
717 def encode_files_sidedata(files):
718 all_files = set(files.touched)
718 all_files = set(files.touched)
719 all_files.update(files.copied_from_p1.values())
719 all_files.update(files.copied_from_p1.values())
720 all_files.update(files.copied_from_p2.values())
720 all_files.update(files.copied_from_p2.values())
721 all_files = sorted(all_files)
721 all_files = sorted(all_files)
722 file_idx = {f: i for (i, f) in enumerate(all_files)}
722 file_idx = {f: i for (i, f) in enumerate(all_files)}
723 file_idx[None] = 0
723 file_idx[None] = 0
724
724
725 chunks = [INDEX_HEADER.pack(len(all_files))]
725 chunks = [INDEX_HEADER.pack(len(all_files))]
726
726
727 filename_length = 0
727 filename_length = 0
728 for f in all_files:
728 for f in all_files:
729 filename_size = len(f)
729 filename_size = len(f)
730 filename_length += filename_size
730 filename_length += filename_size
731 flag = 0
731 flag = 0
732 if f in files.added:
732 if f in files.added:
733 flag |= ADDED_FLAG
733 flag |= ADDED_FLAG
734 elif f in files.merged:
734 elif f in files.merged:
735 flag |= MERGED_FLAG
735 flag |= MERGED_FLAG
736 elif f in files.removed:
736 elif f in files.removed:
737 flag |= REMOVED_FLAG
737 flag |= REMOVED_FLAG
738 elif f in files.salvaged:
738 elif f in files.salvaged:
739 flag |= SALVAGED_FLAG
739 flag |= SALVAGED_FLAG
740 elif f in files.touched:
740 elif f in files.touched:
741 flag |= TOUCHED_FLAG
741 flag |= TOUCHED_FLAG
742
742
743 copy = None
743 copy = None
744 if f in files.copied_from_p1:
744 if f in files.copied_from_p1:
745 flag |= COPIED_FROM_P1_FLAG
745 flag |= COPIED_FROM_P1_FLAG
746 copy = files.copied_from_p1.get(f)
746 copy = files.copied_from_p1.get(f)
747 elif f in files.copied_from_p2:
747 elif f in files.copied_from_p2:
748 copy = files.copied_from_p2.get(f)
748 copy = files.copied_from_p2.get(f)
749 flag |= COPIED_FROM_P2_FLAG
749 flag |= COPIED_FROM_P2_FLAG
750 copy_idx = file_idx[copy]
750 copy_idx = file_idx[copy]
751 chunks.append(INDEX_ENTRY.pack(flag, filename_length, copy_idx))
751 chunks.append(INDEX_ENTRY.pack(flag, filename_length, copy_idx))
752 chunks.extend(all_files)
752 chunks.extend(all_files)
753 return {sidedatamod.SD_FILES: b''.join(chunks)}
753 return {sidedatamod.SD_FILES: b''.join(chunks)}
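# Editor's layout note: the blob built above is a 4-byte big-endian file
# count, then one fixed-size (flag, cumulative-filename-end, copy-index)
# entry per file, then every filename concatenated with no separator; the
# decoder below recovers each name from consecutive "end" offsets.
assert INDEX_HEADER.size == 4
assert INDEX_ENTRY.size == 9  # ">bLL": 1 + 4 + 4 bytes, no padding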
754
754
755
755
756 def decode_files_sidedata(sidedata):
756 def decode_files_sidedata(sidedata):
757 md = ChangingFiles()
757 md = ChangingFiles()
758 raw = sidedata.get(sidedatamod.SD_FILES)
758 raw = sidedata.get(sidedatamod.SD_FILES)
759
759
760 if raw is None:
760 if raw is None:
761 return md
761 return md
762
762
763 copies = []
763 copies = []
764 all_files = []
764 all_files = []
765
765
766 assert len(raw) >= INDEX_HEADER.size
766 assert len(raw) >= INDEX_HEADER.size
767 total_files = INDEX_HEADER.unpack_from(raw, 0)[0]
767 total_files = INDEX_HEADER.unpack_from(raw, 0)[0]
768
768
769 offset = INDEX_HEADER.size
769 offset = INDEX_HEADER.size
770 file_offset_base = offset + (INDEX_ENTRY.size * total_files)
770 file_offset_base = offset + (INDEX_ENTRY.size * total_files)
771 file_offset_last = file_offset_base
771 file_offset_last = file_offset_base
772
772
773 assert len(raw) >= file_offset_base
773 assert len(raw) >= file_offset_base
774
774
775 for idx in range(total_files):
775 for idx in range(total_files):
776 flag, file_end, copy_idx = INDEX_ENTRY.unpack_from(raw, offset)
776 flag, file_end, copy_idx = INDEX_ENTRY.unpack_from(raw, offset)
777 file_end += file_offset_base
777 file_end += file_offset_base
778 filename = raw[file_offset_last:file_end]
778 filename = raw[file_offset_last:file_end]
779 filesize = file_end - file_offset_last
779 filesize = file_end - file_offset_last
780 assert len(filename) == filesize
780 assert len(filename) == filesize
781 offset += INDEX_ENTRY.size
781 offset += INDEX_ENTRY.size
782 file_offset_last = file_end
782 file_offset_last = file_end
783 all_files.append(filename)
783 all_files.append(filename)
784 if flag & ACTION_MASK == ADDED_FLAG:
784 if flag & ACTION_MASK == ADDED_FLAG:
785 md.mark_added(filename)
785 md.mark_added(filename)
786 elif flag & ACTION_MASK == MERGED_FLAG:
786 elif flag & ACTION_MASK == MERGED_FLAG:
787 md.mark_merged(filename)
787 md.mark_merged(filename)
788 elif flag & ACTION_MASK == REMOVED_FLAG:
788 elif flag & ACTION_MASK == REMOVED_FLAG:
789 md.mark_removed(filename)
789 md.mark_removed(filename)
790 elif flag & ACTION_MASK == SALVAGED_FLAG:
790 elif flag & ACTION_MASK == SALVAGED_FLAG:
791 md.mark_salvaged(filename)
791 md.mark_salvaged(filename)
792 elif flag & ACTION_MASK == TOUCHED_FLAG:
792 elif flag & ACTION_MASK == TOUCHED_FLAG:
793 md.mark_touched(filename)
793 md.mark_touched(filename)
794
794
795 copied = None
795 copied = None
796 if flag & COPIED_MASK == COPIED_FROM_P1_FLAG:
796 if flag & COPIED_MASK == COPIED_FROM_P1_FLAG:
797 copied = md.mark_copied_from_p1
797 copied = md.mark_copied_from_p1
798 elif flag & COPIED_MASK == COPIED_FROM_P2_FLAG:
798 elif flag & COPIED_MASK == COPIED_FROM_P2_FLAG:
799 copied = md.mark_copied_from_p2
799 copied = md.mark_copied_from_p2
800
800
801 if copied is not None:
801 if copied is not None:
802 copies.append((copied, filename, copy_idx))
802 copies.append((copied, filename, copy_idx))
803
803
804 for copied, filename, copy_idx in copies:
804 for copied, filename, copy_idx in copies:
805 copied(all_files[copy_idx], filename)
805 copied(all_files[copy_idx], filename)
806
806
807 return md
807 return md
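# Editor's round-trip sketch: encoding then decoding preserves the recorded
# statuses. This assumes, as elsewhere in this file, that mark_added() and
# mark_removed() also mark the file as touched:
_cf = ChangingFiles()
_cf.mark_added(b'new')
_cf.mark_removed(b'gone')
_back = decode_files_sidedata(encode_files_sidedata(_cf))
assert _back.added == {b'new'} and _back.removed == {b'gone'}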
808
808
809
809
810 def _getsidedata(srcrepo, rev):
810 def _getsidedata(srcrepo, rev):
811 ctx = srcrepo[rev]
811 ctx = srcrepo[rev]
812 files = compute_all_files_changes(ctx)
812 files = compute_all_files_changes(ctx)
813 return encode_files_sidedata(files), files.has_copies_info
813 return encode_files_sidedata(files), files.has_copies_info
814
814
815
815
816 def copies_sidedata_computer(repo, revlog, rev, existing_sidedata):
816 def copies_sidedata_computer(repo, revlog, rev, existing_sidedata):
817 sidedata, has_copies_info = _getsidedata(repo, rev)
817 sidedata, has_copies_info = _getsidedata(repo, rev)
818 flags_to_add = sidedataflag.REVIDX_HASCOPIESINFO if has_copies_info else 0
818 flags_to_add = sidedataflag.REVIDX_HASCOPIESINFO if has_copies_info else 0
819 return sidedata, (flags_to_add, 0)
819 return sidedata, (flags_to_add, 0)
820
820
821
821
822 def _sidedata_worker(srcrepo, revs_queue, sidedata_queue, tokens):
822 def _sidedata_worker(srcrepo, revs_queue, sidedata_queue, tokens):
823 """The function used by worker precomputing sidedata
823 """The function used by worker precomputing sidedata
824
824
825 It read an input queue containing revision numbers
825 It read an input queue containing revision numbers
826 It write in an output queue containing (rev, <sidedata-map>)
826 It write in an output queue containing (rev, <sidedata-map>)
827
827
828 The `None` input value is used as a stop signal.
828 The `None` input value is used as a stop signal.
829
829
830 The `tokens` semaphore is user to avoid having too many unprocessed
830 The `tokens` semaphore is user to avoid having too many unprocessed
831 entries. The workers needs to acquire one token before fetching a task.
831 entries. The workers needs to acquire one token before fetching a task.
832 They will be released by the consumer of the produced data.
832 They will be released by the consumer of the produced data.
833 """
833 """
834 tokens.acquire()
834 tokens.acquire()
835 rev = revs_queue.get()
835 rev = revs_queue.get()
836 while rev is not None:
836 while rev is not None:
837 data = _getsidedata(srcrepo, rev)
837 data = _getsidedata(srcrepo, rev)
838 sidedata_queue.put((rev, data))
838 sidedata_queue.put((rev, data))
839 tokens.acquire()
839 tokens.acquire()
840 rev = revs_queue.get()
840 rev = revs_queue.get()
841 # processing of `None` is completed, release the token.
841 # processing of `None` is completed, release the token.
842 tokens.release()
842 tokens.release()
843
843
844
844
845 BUFF_PER_WORKER = 50
845 BUFF_PER_WORKER = 50
846
846
847
847
848 def _get_worker_sidedata_adder(srcrepo, destrepo):
848 def _get_worker_sidedata_adder(srcrepo, destrepo):
849 """The parallel version of the sidedata computation
849 """The parallel version of the sidedata computation
850
850
851 This code spawns a pool of workers that precompute a buffer of sidedata
851 This code spawns a pool of workers that precompute a buffer of sidedata
852 before we actually need it"""
852 before we actually need it"""
853 # avoid circular import copies -> scmutil -> worker -> copies
853 # avoid circular import copies -> scmutil -> worker -> copies
854 from . import worker
854 from . import worker
855
855
856 nbworkers = worker._numworkers(srcrepo.ui)
856 nbworkers = worker._numworkers(srcrepo.ui)
857
857
858 tokens = multiprocessing.BoundedSemaphore(nbworkers * BUFF_PER_WORKER)
858 tokens = multiprocessing.BoundedSemaphore(nbworkers * BUFF_PER_WORKER)
859 revsq = multiprocessing.Queue()
859 revsq = multiprocessing.Queue()
860 sidedataq = multiprocessing.Queue()
860 sidedataq = multiprocessing.Queue()
861
861
862 assert srcrepo.filtername is None
862 assert srcrepo.filtername is None
863 # queue all tasks beforehand; revision numbers are small and it makes
863 # queue all tasks beforehand; revision numbers are small and it makes
864 # synchronisation simpler
864 # synchronisation simpler
865 #
865 #
866 # Since the computation for each node can be quite expensive, the overhead
866 # Since the computation for each node can be quite expensive, the overhead
867 # of using a single queue is not relevant. In practice, most computations
867 # of using a single queue is not relevant. In practice, most computations
868 # are fast but some are very expensive and dominate all the other smaller
868 # are fast but some are very expensive and dominate all the other smaller
869 # costs.
869 # costs.
870 for r in srcrepo.changelog.revs():
870 for r in srcrepo.changelog.revs():
871 revsq.put(r)
871 revsq.put(r)
872 # queue the "no more tasks" markers
872 # queue the "no more tasks" markers
873 for i in range(nbworkers):
873 for i in range(nbworkers):
874 revsq.put(None)
874 revsq.put(None)
875
875
876 allworkers = []
876 allworkers = []
877 for i in range(nbworkers):
877 for i in range(nbworkers):
878 args = (srcrepo, revsq, sidedataq, tokens)
878 args = (srcrepo, revsq, sidedataq, tokens)
879 w = multiprocessing.Process(target=_sidedata_worker, args=args)
879 w = multiprocessing.Process(target=_sidedata_worker, args=args)
880 allworkers.append(w)
880 allworkers.append(w)
881 w.start()
881 w.start()
882
882
883 # dictionary to store results for revisions higher than the one we are
883 # dictionary to store results for revisions higher than the one we are
884 # looking for. For example, if we need the sidedata map for 42 and 43 is
884 # looking for. For example, if we need the sidedata map for 42 and 43 is
885 # received, we shelve 43 for later use.
885 # received, we shelve 43 for later use.
886 staging = {}
886 staging = {}
887
887
888 def sidedata_companion(repo, revlog, rev, old_sidedata):
888 def sidedata_companion(repo, revlog, rev, old_sidedata):
889 # Was the data previously shelved?
889 # Was the data previously shelved?
890 data = staging.pop(rev, None)
890 data = staging.pop(rev, None)
891 if data is None:
891 if data is None:
892 # look at the queued results until we find the one we are looking
892 # look at the queued results until we find the one we are looking
893 # for (shelve the other ones)
893 # for (shelve the other ones)
894 r, data = sidedataq.get()
894 r, data = sidedataq.get()
895 while r != rev:
895 while r != rev:
896 staging[r] = data
896 staging[r] = data
897 r, data = sidedataq.get()
897 r, data = sidedataq.get()
898 tokens.release()
898 tokens.release()
899 sidedata, has_copies_info = data
899 sidedata, has_copies_info = data
900 new_flag = 0
900 new_flag = 0
901 if has_copies_info:
901 if has_copies_info:
902 new_flag = sidedataflag.REVIDX_HASCOPIESINFO
902 new_flag = sidedataflag.REVIDX_HASCOPIESINFO
903 return sidedata, (new_flag, 0)
903 return sidedata, (new_flag, 0)
904
904
905 return sidedata_companion
905 return sidedata_companion
@@ -1,3255 +1,3255
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import copy
12 import copy
13 import os
13 import os
14 import re
14 import re
15 import shutil
15 import shutil
16 import zlib
16 import zlib
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 sha1nodeconstants,
21 sha1nodeconstants,
22 short,
22 short,
23 )
23 )
24 from .pycompat import open
24 from .pycompat import open
25 from . import (
25 from . import (
26 copies,
26 copies,
27 diffhelper,
27 diffhelper,
28 diffutil,
28 diffutil,
29 encoding,
29 encoding,
30 error,
30 error,
31 mail,
31 mail,
32 mdiff,
32 mdiff,
33 pathutil,
33 pathutil,
34 pycompat,
34 pycompat,
35 scmutil,
35 scmutil,
36 similar,
36 similar,
37 util,
37 util,
38 vfs as vfsmod,
38 vfs as vfsmod,
39 )
39 )
40 from .utils import (
40 from .utils import (
41 dateutil,
41 dateutil,
42 hashutil,
42 hashutil,
43 procutil,
43 procutil,
44 stringutil,
44 stringutil,
45 )
45 )
46
46
47 stringio = util.stringio
47 stringio = util.stringio
48
48
49 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
49 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
50 tabsplitter = re.compile(br'(\t+|[^\t]+)')
50 tabsplitter = re.compile(br'(\t+|[^\t]+)')
51 wordsplitter = re.compile(
51 wordsplitter = re.compile(
52 br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])'
52 br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])'
53 )
53 )
54
54
55 PatchError = error.PatchError
55 PatchError = error.PatchError
56 PatchParseError = error.PatchParseError
56 PatchParseError = error.PatchParseError
57 PatchApplicationError = error.PatchApplicationError
57 PatchApplicationError = error.PatchApplicationError
58
58
59 # public functions
59 # public functions
60
60
61
61
62 def split(stream):
62 def split(stream):
63 '''return an iterator of individual patches from a stream'''
63 '''return an iterator of individual patches from a stream'''
64
64
65 def isheader(line, inheader):
65 def isheader(line, inheader):
66 if inheader and line.startswith((b' ', b'\t')):
66 if inheader and line.startswith((b' ', b'\t')):
67 # continuation
67 # continuation
68 return True
68 return True
69 if line.startswith((b' ', b'-', b'+')):
69 if line.startswith((b' ', b'-', b'+')):
70 # diff line - don't check for header pattern in there
70 # diff line - don't check for header pattern in there
71 return False
71 return False
72 l = line.split(b': ', 1)
72 l = line.split(b': ', 1)
73 return len(l) == 2 and b' ' not in l[0]
73 return len(l) == 2 and b' ' not in l[0]
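# Hedged examples of the heuristic above:
#   isheader(b'From: alice', False) -> True   (key before ': ', no space in key)
#   isheader(b'\tcontinued', True)  -> True   (indented header continuation)
#   isheader(b'+added line', False) -> False  (diff content, never a header)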
74
74
75 def chunk(lines):
75 def chunk(lines):
76 return stringio(b''.join(lines))
76 return stringio(b''.join(lines))
77
77
78 def hgsplit(stream, cur):
78 def hgsplit(stream, cur):
79 inheader = True
79 inheader = True
80
80
81 for line in stream:
81 for line in stream:
82 if not line.strip():
82 if not line.strip():
83 inheader = False
83 inheader = False
84 if not inheader and line.startswith(b'# HG changeset patch'):
84 if not inheader and line.startswith(b'# HG changeset patch'):
85 yield chunk(cur)
85 yield chunk(cur)
86 cur = []
86 cur = []
87 inheader = True
87 inheader = True
88
88
89 cur.append(line)
89 cur.append(line)
90
90
91 if cur:
91 if cur:
92 yield chunk(cur)
92 yield chunk(cur)
93
93
94 def mboxsplit(stream, cur):
94 def mboxsplit(stream, cur):
95 for line in stream:
95 for line in stream:
96 if line.startswith(b'From '):
96 if line.startswith(b'From '):
97 for c in split(chunk(cur[1:])):
97 for c in split(chunk(cur[1:])):
98 yield c
98 yield c
99 cur = []
99 cur = []
100
100
101 cur.append(line)
101 cur.append(line)
102
102
103 if cur:
103 if cur:
104 for c in split(chunk(cur[1:])):
104 for c in split(chunk(cur[1:])):
105 yield c
105 yield c
106
106
107 def mimesplit(stream, cur):
107 def mimesplit(stream, cur):
108 def msgfp(m):
108 def msgfp(m):
109 fp = stringio()
109 fp = stringio()
110 # pytype: disable=wrong-arg-types
110 # pytype: disable=wrong-arg-types
111 g = mail.Generator(fp, mangle_from_=False)
111 g = mail.Generator(fp, mangle_from_=False)
112 # pytype: enable=wrong-arg-types
112 # pytype: enable=wrong-arg-types
113 g.flatten(m)
113 g.flatten(m)
114 fp.seek(0)
114 fp.seek(0)
115 return fp
115 return fp
116
116
117 for line in stream:
117 for line in stream:
118 cur.append(line)
118 cur.append(line)
119 c = chunk(cur)
119 c = chunk(cur)
120
120
121 m = mail.parse(c)
121 m = mail.parse(c)
122 if not m.is_multipart():
122 if not m.is_multipart():
123 yield msgfp(m)
123 yield msgfp(m)
124 else:
124 else:
125 ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
125 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
126 for part in m.walk():
126 for part in m.walk():
127 ct = part.get_content_type()
127 ct = part.get_content_type()
128 if ct not in ok_types:
128 if ct not in ok_types:
129 continue
129 continue
130 yield msgfp(part)
130 yield msgfp(part)
131
131
132 def headersplit(stream, cur):
132 def headersplit(stream, cur):
133 inheader = False
133 inheader = False
134
134
135 for line in stream:
135 for line in stream:
136 if not inheader and isheader(line, inheader):
136 if not inheader and isheader(line, inheader):
137 yield chunk(cur)
137 yield chunk(cur)
138 cur = []
138 cur = []
139 inheader = True
139 inheader = True
140 if inheader and not isheader(line, inheader):
140 if inheader and not isheader(line, inheader):
141 inheader = False
141 inheader = False
142
142
143 cur.append(line)
143 cur.append(line)
144
144
145 if cur:
145 if cur:
146 yield chunk(cur)
146 yield chunk(cur)
147
147
148 def remainder(cur):
148 def remainder(cur):
149 yield chunk(cur)
149 yield chunk(cur)
150
150
151 class fiter:
151 class fiter:
152 def __init__(self, fp):
152 def __init__(self, fp):
153 self.fp = fp
153 self.fp = fp
154
154
155 def __iter__(self):
155 def __iter__(self):
156 return self
156 return self
157
157
158 def next(self):
158 def next(self):
159 l = self.fp.readline()
159 l = self.fp.readline()
160 if not l:
160 if not l:
161 raise StopIteration
161 raise StopIteration
162 return l
162 return l
163
163
164 __next__ = next
164 __next__ = next
165
165
166 inheader = False
166 inheader = False
167 cur = []
167 cur = []
168
168
169 mimeheaders = [b'content-type']
169 mimeheaders = [b'content-type']
170
170
171 if not hasattr(stream, 'next'):
171 if not hasattr(stream, 'next'):
172 # http responses, for example, have readline but not next
172 # http responses, for example, have readline but not next
173 stream = fiter(stream)
173 stream = fiter(stream)
174
174
175 for line in stream:
175 for line in stream:
176 cur.append(line)
176 cur.append(line)
177 if line.startswith(b'# HG changeset patch'):
177 if line.startswith(b'# HG changeset patch'):
178 return hgsplit(stream, cur)
178 return hgsplit(stream, cur)
179 elif line.startswith(b'From '):
179 elif line.startswith(b'From '):
180 return mboxsplit(stream, cur)
180 return mboxsplit(stream, cur)
181 elif isheader(line, inheader):
181 elif isheader(line, inheader):
182 inheader = True
182 inheader = True
183 if line.split(b':', 1)[0].lower() in mimeheaders:
183 if line.split(b':', 1)[0].lower() in mimeheaders:
184 # let email parser handle this
184 # let email parser handle this
185 return mimesplit(stream, cur)
185 return mimesplit(stream, cur)
186 elif line.startswith(b'--- ') and inheader:
186 elif line.startswith(b'--- ') and inheader:
187 # No evil headers seen by diff start, split by hand
187 # No evil headers seen by diff start, split by hand
188 return headersplit(stream, cur)
188 return headersplit(stream, cur)
189 # Not enough info, keep reading
189 # Not enough info, keep reading
190
190
191 # if we are here, we have a very plain patch
191 # if we are here, we have a very plain patch
192 return remainder(cur)
192 return remainder(cur)
193
193
194
194
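# Usage sketch for split(); the caller and file name below are assumptions,
# not part of this module:
#
#     with open('incoming.patches', 'rb') as fp:
#         for piece in split(fp):
#             payload = piece.read()  # bytes of one individual patch
#
# Each yielded chunk is an in-memory stringio positioned at offset 0.
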
## Some facility for extensible patch parsing:
# list of pairs ("header to match", "data key")
patchheadermap = [
    (b'Date', b'date'),
    (b'Branch', b'branch'),
    (b'Node ID', b'nodeid'),
]


@contextlib.contextmanager
def extract(ui, fileobj):
    """extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return a dictionary. Standard keys are:
    - filename,
    - message,
    - user,
    - date,
    - branch,
    - node,
    - p1,
    - p2.
    Any item can be missing from the dictionary. If filename is missing,
    fileobj did not contain a patch. The temporary file named by filename
    is removed when the context manager exits."""

    fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
    tmpfp = os.fdopen(fd, 'wb')
    try:
        yield _extract(ui, fileobj, tmpname, tmpfp)
    finally:
        tmpfp.close()
        os.unlink(tmpname)


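# Usage sketch for extract(); ui and fileobj come from the caller, and
# consume_patch is a hypothetical consumer:
#
#     with extract(ui, fileobj) as data:
#         if b'filename' in data:
#             consume_patch(data[b'filename'], data.get(b'message'))
#
# The temporary file behind data[b'filename'] only exists inside the
# 'with' block.
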
def _extract(ui, fileobj, tmpname, tmpfp):
    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(
        br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
        br'---[ \t].*?^\+\+\+[ \t]|'
        br'\*\*\*[ \t].*?^---[ \t])',
        re.MULTILINE | re.DOTALL,
    )

    data = {}

    msg = mail.parse(fileobj)

    subject = msg['Subject'] and mail.headdecode(msg['Subject'])
    data[b'user'] = msg['From'] and mail.headdecode(msg['From'])
    if not subject and not data[b'user']:
        # Not an email, restore parsed headers if any
        subject = (
            b'\n'.join(
                b': '.join(map(encoding.strtolocal, h)) for h in msg.items()
            )
            + b'\n'
        )

    # should try to parse msg['Date']
    parents = []

    nodeid = msg['X-Mercurial-Node']
    if nodeid:
        data[b'nodeid'] = nodeid = mail.headdecode(nodeid)
        ui.debug(b'Node ID: %s\n' % nodeid)

    if subject:
        if subject.startswith(b'[PATCH'):
            pend = subject.find(b']')
            if pend >= 0:
                subject = subject[pend + 1 :].lstrip()
        subject = re.sub(br'\n[ \t]+', b' ', subject)
        ui.debug(b'Subject: %s\n' % subject)
    if data[b'user']:
        ui.debug(b'From: %s\n' % data[b'user'])
    diffs_seen = 0
    ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
    message = b''
    for part in msg.walk():
        content_type = pycompat.bytestr(part.get_content_type())
        ui.debug(b'Content-Type: %s\n' % content_type)
        if content_type not in ok_types:
            continue
        payload = part.get_payload(decode=True)
        m = diffre.search(payload)
        if m:
            hgpatch = False
            hgpatchheader = False
            ignoretext = False

            ui.debug(b'found patch at byte %d\n' % m.start(0))
            diffs_seen += 1
            cfp = stringio()
            for line in payload[: m.start(0)].splitlines():
                if line.startswith(b'# HG changeset patch') and not hgpatch:
                    ui.debug(b'patch generated by hg export\n')
                    hgpatch = True
                    hgpatchheader = True
                    # drop earlier commit message content
                    cfp.seek(0)
                    cfp.truncate()
                    subject = None
                elif hgpatchheader:
                    if line.startswith(b'# User '):
                        data[b'user'] = line[7:]
                        ui.debug(b'From: %s\n' % data[b'user'])
                    elif line.startswith(b"# Parent "):
                        parents.append(line[9:].lstrip())
                    elif line.startswith(b"# "):
                        for header, key in patchheadermap:
                            prefix = b'# %s ' % header
                            if line.startswith(prefix):
                                data[key] = line[len(prefix) :]
                                ui.debug(b'%s: %s\n' % (header, data[key]))
                    else:
                        hgpatchheader = False
                elif line == b'---':
                    ignoretext = True
                if not hgpatchheader and not ignoretext:
                    cfp.write(line)
                    cfp.write(b'\n')
            message = cfp.getvalue()
            if tmpfp:
                tmpfp.write(payload)
                if not payload.endswith(b'\n'):
                    tmpfp.write(b'\n')
        elif not diffs_seen and message and content_type == b'text/plain':
            message += b'\n' + payload

    if subject and not message.startswith(subject):
        message = b'%s\n%s' % (subject, message)
    data[b'message'] = message
    tmpfp.close()
    if parents:
        data[b'p1'] = parents.pop(0)
        if parents:
            data[b'p2'] = parents.pop(0)

    if diffs_seen:
        data[b'filename'] = tmpname

    return data


class patchmeta:
    """Patched file metadata

    'op' is the performed operation, one of ADD, DELETE, RENAME, MODIFY
    or COPY.  'path' is the patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    the file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """

    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = b'MODIFY'
        self.binary = False

    def setmode(self, mode):
        islink = mode & 0o20000
        isexec = mode & 0o100
        self.mode = (islink, isexec)

    def copy(self):
        other = patchmeta(self.path)
        other.oldpath = self.oldpath
        other.mode = self.mode
        other.op = self.op
        other.binary = self.binary
        return other

    def _ispatchinga(self, afile):
        if afile == b'/dev/null':
            return self.op == b'ADD'
        return afile == b'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        if bfile == b'/dev/null':
            return self.op == b'DELETE'
        return bfile == b'b/' + self.path

    def ispatching(self, afile, bfile):
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)


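# Note that setmode() stores raw bit masks rather than booleans: e.g.
# (illustrative values) patchmeta(b'f').setmode(0o100755) leaves mode ==
# (0, 0o100), which reads as "not a symlink, executable" when the tuple
# members are used as truth values.
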
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    gp = None
    gitpatches = []
    for line in lr:
        line = line.rstrip(b'\r\n')
        if line.startswith(b'diff --git a/'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                dst = m.group(2)
                gp = patchmeta(dst)
        elif gp:
            if line.startswith(b'--- '):
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith(b'rename from '):
                gp.op = b'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith(b'rename to '):
                gp.path = line[10:]
            elif line.startswith(b'copy from '):
                gp.op = b'COPY'
                gp.oldpath = line[10:]
            elif line.startswith(b'copy to '):
                gp.path = line[8:]
            elif line.startswith(b'deleted file'):
                gp.op = b'DELETE'
            elif line.startswith(b'new file mode '):
                gp.op = b'ADD'
                gp.setmode(int(line[-6:], 8))
            elif line.startswith(b'new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith(b'GIT binary patch'):
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    return gitpatches


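# For example (illustrative input), feeding readgitpatch() these lines:
#
#     diff --git a/old/name b/new/name
#     rename from old/name
#     rename to new/name
#     --- a/old/name
#
# produces a single patchmeta with op == b'RENAME', path == b'new/name'
# and oldpath == b'old/name'.
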
class linereader:
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            l = self.buf[0]
            del self.buf[0]
            return l
        return self.fp.readline()

    def __iter__(self):
        return iter(self.readline, b'')


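# The push-back allows one-line lookahead, e.g. (hypothetical fp):
#
#     lr = linereader(fp)
#     line = lr.readline()
#     lr.push(line)  # the next readline() returns the same line again
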
class abstractbackend:
    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. failed is the number of hunks
        which failed to apply and total the total number of hunks for this
        file.
        """

    def exists(self, fname):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError


class fsbackend(abstractbackend):
    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = vfsmod.vfs(basedir)

    def getfile(self, fname):
        if self.opener.islink(fname):
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
        except FileNotFoundError:
            pass
        try:
            return (self.opener.read(fname), (False, isexec))
        except FileNotFoundError:
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        rmdir = self.ui.configbool(b'experimental', b'removeemptydirs')
        self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)

    def writerej(self, fname, failed, total, lines):
        fname = fname + b".rej"
        self.ui.warn(
            _(b"%d out of %d hunks FAILED -- saving rejects to file %s\n")
            % (failed, total, fname)
        )
        fp = self.opener(fname, b'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        return self.opener.lexists(fname)


class workingbackend(fsbackend):
    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        if not self.repo.dirstate.get_entry(fname).any_tracked and self.exists(
            fname
        ):
            raise PatchApplicationError(
                _(b'cannot patch %s: file is not tracked') % fname
            )

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        with self.repo.dirstate.changing_files(self.repo):
            wctx = self.repo[None]
            changed = set(self.changed)
            for src, dst in self.copied:
                scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
            if self.removed:
                wctx.forget(sorted(self.removed))
                for f in self.removed:
                    if f not in self.repo.dirstate:
                        # File was deleted and no longer belongs to the
                        # dirstate, it was probably marked added then
                        # deleted, and should not be considered by
                        # marktouched().
                        changed.discard(f)
            if changed:
                scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)


class filestore:
    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        self.maxsize = maxsize
        if self.maxsize is None:
            self.maxsize = 4 * (2**20)
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
        else:
            if self.opener is None:
                root = pycompat.mkdtemp(prefix=b'hg-patch-')
                self.opener = vfsmod.vfs(root)
            # Avoid filename issues with these simple names
            fn = b'%d' % self.created
            self.opener.write(fn, data)
            self.created += 1
            self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        if fname in self.data:
            return self.data[fname]
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        if self.opener:
            shutil.rmtree(self.opener.base)


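# filestore keeps small contents in memory and spills anything beyond
# maxsize (4 MiB by default) to a temporary directory. A minimal sketch,
# assuming the data fits in memory:
#
#     store = filestore()
#     store.setfile(b'f', b'content', (False, False))
#     data, mode, copied = store.getfile(b'f')
#     store.close()  # removes the spill directory, if one was created
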
class repobackend(abstractbackend):
    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        if fname not in self.ctx:
            raise PatchApplicationError(
                _(b'cannot patch %s: file is not tracked') % fname
            )

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        return fctx.data(), (b'l' in flags, b'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        return self.changed | self.removed


# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
eolmodes = [b'strict', b'crlf', b'lf', b'auto']


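# For example, unidesc matches b'@@ -1,5 +1,6 @@' with groups
# (b'1', b'5', b'1', b'6'); a length group is None when the range is a
# bare start line (implied length 1), as in b'@@ -1 +1 @@'.
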
class patchfile:
    def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in (b'ADD', b'COPY', b'RENAME')
        self.remove = gp.op == b'DELETE'
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith(b'\r\n'):
                    self.eol = b'\r\n'
                elif self.lines[0].endswith(b'\n'):
                    self.eol = b'\n'
                if eolmode != b'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith(b'\r\n'):
                            l = l[:-2] + b'\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_(b"unable to find '%s' for patching\n") % self.fname)
            self.ui.warn(
                _(
                    b"(use '--prefix' to apply patch relative to the "
                    b"current directory)\n"
                )
            )

        self.hash = {}
        self.dirty = 0
        self.offset = 0
        self.skew = 0
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        if self.eolmode == b'auto':
            eol = self.eol
        elif self.eolmode == b'crlf':
            eol = b'\r\n'
        else:
            eol = b'\n'

        if self.eolmode != b'strict' and eol and eol != b'\n':
            rawlines = []
            for l in lines:
                if l and l.endswith(b'\n'):
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, b''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _(b"patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = [b"--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1:] != b'\n':
                    lines.append(b'\n' + diffhelper.MISSING_NEWLINE_MARKER)
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        if not h.complete():
            raise PatchParseError(
                _(b"bad hunk #%d %s (%d %d %d %d)")
                % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)
            )

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(
                    _(b"cannot create %s: destination already exists\n")
                    % self.fname
                )
            else:
                self.ui.warn(_(b"file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (
            self.eolmode in (b'crlf', b'lf')
            or self.eolmode == b'auto'
            and self.eol
        ):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if self.skew == 0 and diffhelper.testhunk(old, self.lines, oldstart):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart : oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in range(self.ui.configint(b"patch", b"fuzz") + 1):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew into account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelper.testhunk(old, self.lines, l):
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _(
                                b"Hunk #%d succeeded at %d "
                                b"with fuzz %d "
                                b"(offset %d lines).\n"
                            )
                            self.printfile(True)
                            self.ui.warn(
                                msg % (h.number, l + 1, fuzzlen, offset)
                            )
                        else:
                            msg = _(
                                b"Hunk #%d succeeded at %d "
                                b"(offset %d lines).\n"
                            )
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_(b"Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)


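# Summary of patchfile.apply() return values: 0 for a clean application,
# the fuzz level used when the hunk only applied with fuzz, and -1 when
# the hunk was rejected; close() then flushes modified lines, writes any
# .rej file, and returns the number of rejected hunks.
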
class header:
    """patch header"""

    diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
    diff_re = re.compile(b'diff -r .* (.*)$')
    allhunks_re = re.compile(b'(?:index|deleted file) ')
    pretty_re = re.compile(b'(?:new file|deleted file) ')
    special_re = re.compile(b'(?:index|deleted|copy|rename|new mode) ')
    newfile_re = re.compile(b'(?:new file|copy to|rename to)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        return any(h.startswith(b'index ') for h in self.header)

    def pretty(self, fp):
        for h in self.header:
            if h.startswith(b'index '):
                fp.write(_(b'this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(h):
                fp.write(h)
                if self.binary():
                    fp.write(_(b'this is a binary file\n'))
                break
            if h.startswith(b'---'):
                fp.write(
                    _(b'%d hunks, %d lines changed\n')
                    % (
                        len(self.hunks),
                        sum([max(h.added, h.removed) for h in self.hunks]),
                    )
                )
                break
            fp.write(h)

    def write(self, fp):
        fp.write(b''.join(self.header))

    def allhunks(self):
        return any(self.allhunks_re.match(h) for h in self.header)

    def files(self):
        match = self.diffgit_re.match(self.header[0])
        if match:
            fromfile, tofile = match.groups()
            if fromfile == tofile:
                return [fromfile]
            return [fromfile, tofile]
        else:
            return self.diff_re.match(self.header[0]).groups()

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (
            ' '.join(pycompat.rapply(pycompat.fsdecode, self.files()))
        )

    def isnewfile(self):
        return any(self.newfile_re.match(h) for h in self.header)

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level; for example, a file that has been deleted is a special file.
        # The user cannot change the content of the operation: in the case of
        # a deleted file, the deletion must be taken or left as a whole, not
        # in part.
        # Newly added files are special if they are empty; they are not
        # special if they have some content, as we want to be able to change
        # it.
        nocontent = len(self.header) == 2
        emptynewfile = self.isnewfile() and nocontent
        return emptynewfile or any(
            self.special_re.match(h) for h in self.header
        )


class recordhunk:
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """

    def __init__(
        self,
        header,
        fromline,
        toline,
        proc,
        before,
        hunk,
        after,
        maxcontext=None,
    ):
        def trimcontext(lines, reverse=False):
            if maxcontext is not None:
                delta = len(lines) - maxcontext
                if delta > 0:
                    if reverse:
                        return delta, lines[delta:]
                    else:
                        return delta, lines[:maxcontext]
            return 0, lines

        self.header = header
        trimedbefore, self.before = trimcontext(before, True)
        self.fromline = fromline + trimedbefore
        self.toline = toline + trimedbefore
        _trimedafter, self.after = trimcontext(after, False)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False

        return (
            (v.hunk == self.hunk)
            and (v.proc == self.proc)
            and (self.fromline == v.fromline)
            and (self.header.files() == v.header.files())
        )

    def __hash__(self):
        return hash(
            (
                tuple(self.hunk),
                tuple(self.header.files()),
                self.fromline,
                self.proc,
            )
        )

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = len([h for h in hunk if h.startswith(b'+')])
        rem = len([h for h in hunk if h.startswith(b'-')])
        return add, rem

    def reversehunk(self):
        """return another recordhunk which is the reverse of the hunk

        If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
        that, swap fromline/toline and +/- signs while keeping other things
        unchanged.
        """
        m = {b'+': b'-', b'-': b'+', b'\\': b'\\'}
        hunk = [b'%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
        return recordhunk(
            self.header,
            self.toline,
            self.fromline,
            self.proc,
            self.before,
            hunk,
            self.after,
        )

    def write(self, fp):
        delta = len(self.before) + len(self.after)
        if self.after and self.after[-1] == diffhelper.MISSING_NEWLINE_MARKER:
            delta -= 1
        fromlen = delta + self.removed
        tolen = delta + self.added
        fp.write(
            b'@@ -%d,%d +%d,%d @@%s\n'
            % (
                self.fromline,
                fromlen,
                self.toline,
                tolen,
                self.proc and (b' ' + self.proc),
            )
        )
        fp.write(b''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    @encoding.strmethod
    def __repr__(self):
        return b'<hunk %r@%d>' % (self.filename(), self.fromline)


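# reversehunk() flips the +/- markers and swaps the from/to line ranges,
# so a hunk line b'+added\n' becomes b'-added\n' in the reversed hunk;
# b'\\' "no newline" markers pass through and the before/after context is
# reused as-is.
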
1101 def getmessages():
1101 def getmessages():
1102 return {
1102 return {
1103 b'multiple': {
1103 b'multiple': {
            b'apply': _(b"apply change %d/%d to '%s'?"),
            b'discard': _(b"discard change %d/%d to '%s'?"),
            b'keep': _(b"keep change %d/%d to '%s'?"),
            b'record': _(b"record change %d/%d to '%s'?"),
        },
        b'single': {
            b'apply': _(b"apply this change to '%s'?"),
            b'discard': _(b"discard this change to '%s'?"),
            b'keep': _(b"keep this change to '%s'?"),
            b'record': _(b"record this change to '%s'?"),
        },
        b'help': {
            b'apply': _(
                b'[Ynesfdaq?]'
                b'$$ &Yes, apply this change'
                b'$$ &No, skip this change'
                b'$$ &Edit this change manually'
                b'$$ &Skip remaining changes to this file'
                b'$$ Apply remaining changes to this &file'
                b'$$ &Done, skip remaining changes and files'
                b'$$ Apply &all changes to all remaining files'
                b'$$ &Quit, applying no changes'
                b'$$ &? (display help)'
            ),
            b'discard': _(
                b'[Ynesfdaq?]'
                b'$$ &Yes, discard this change'
                b'$$ &No, skip this change'
                b'$$ &Edit this change manually'
                b'$$ &Skip remaining changes to this file'
                b'$$ Discard remaining changes to this &file'
                b'$$ &Done, skip remaining changes and files'
                b'$$ Discard &all changes to all remaining files'
                b'$$ &Quit, discarding no changes'
                b'$$ &? (display help)'
            ),
            b'keep': _(
                b'[Ynesfdaq?]'
                b'$$ &Yes, keep this change'
                b'$$ &No, skip this change'
                b'$$ &Edit this change manually'
                b'$$ &Skip remaining changes to this file'
                b'$$ Keep remaining changes to this &file'
                b'$$ &Done, skip remaining changes and files'
                b'$$ Keep &all changes to all remaining files'
                b'$$ &Quit, keeping all changes'
                b'$$ &? (display help)'
            ),
            b'record': _(
                b'[Ynesfdaq?]'
                b'$$ &Yes, record this change'
                b'$$ &No, skip this change'
                b'$$ &Edit this change manually'
                b'$$ &Skip remaining changes to this file'
                b'$$ Record remaining changes to this &file'
                b'$$ &Done, skip remaining changes and files'
                b'$$ Record &all changes to all remaining files'
                b'$$ &Quit, recording no changes'
                b'$$ &? (display help)'
            ),
        },
    }

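# The strings above follow ui.promptchoice()'s prompt format: the text
# before the first '$$' is the short form shown to the user, each
# '$$'-separated segment is one response, and '&' marks that response's
# accelerator key. A minimal sketch of the mechanics (hypothetical
# prompt, for illustration only):
#
#   resps = b'[Yn?]$$ &Yes$$ &No$$ &? (display help)'
#   r = ui.promptchoice(b'apply? %s' % resps)
#   # r is the 0-based index of the chosen response: 0 for 'y', 1 for 'n'
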
def filterpatch(ui, headers, match, operation=None):
    """Interactively filter patch chunks into applied-only chunks"""
    messages = getmessages()

    if operation is None:
        operation = b'record'

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = messages[b'help'][operation]
            # IMPORTANT: keep the last line of this prompt short (<40 English
            # chars is a good target) because of issue6158.
            r = ui.promptchoice(b"%s\n(enter ? for help) %s" % (query, resps))
            ui.write(b"\n")
            if r == 8:  # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                continue
            elif r == 0:  # yes
                ret = True
            elif r == 1:  # no
                ret = False
            elif r == 2:  # Edit patch
                if chunk is None:
                    ui.write(_(b'cannot edit patch for whole file'))
                    ui.write(b"\n")
                    continue
                if chunk.header.binary():
                    ui.write(_(b'cannot edit patch for binary file'))
                    ui.write(b"\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # https://mercurial-scm.org/wiki/RecordExtension)
                phelp = b'---' + _(
                    b"""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
"""
                )
                (patchfd, patchfn) = pycompat.mkstemp(
                    prefix=b"hg-editor-", suffix=b".diff"
                )
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = util.nativeeolwriter(os.fdopen(patchfd, 'wb'))
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write(
                        b''.join(
                            [b'# ' + i + b'\n' for i in phelp.splitlines()]
                        )
                    )
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system(
                        b"%s \"%s\"" % (editor, patchfn),
                        environ={b'HGUSER': ui.username()},
                        blockedtag=b'filterpatch',
                    )
                    if ret != 0:
                        ui.warn(_(b"editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn, 'rb')
                    ncpatchfp = stringio()
                    for line in patchfp:
                        line = util.fromnativeeol(line)
                        if not line.startswith(b'#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3:  # Skip
                ret = skipfile = False
            elif r == 4:  # file (Record remaining)
                ret = skipfile = True
            elif r == 5:  # done, skip remaining
                ret = skipall = False
            elif r == 6:  # all
                ret = skipall = True
            elif r == 7:  # quit
                raise error.CanceledError(_(b'user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}  # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        fixoffset = 0
        hdr = b''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        files = h.files()
        msg = _(b'examine changes to %s?') % _(b' and ').join(
            b"'%s'" % f for f in files
        )
        if all(match.exact(f) for f in files):
            r, skipall, np = True, None, None
        else:
            r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = messages[b'single'][operation] % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = messages[b'multiple'][operation] % (
                    idx,
                    total,
                    chunk.filename(),
                )
            r, skipfile, skipall, newpatches = prompt(
                skipfile, skipall, msg, chunk
            )
            if r:
                if fixoffset:
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                fixoffset += chunk.removed - chunk.added
    return (
        sum(
            [h for h in applied.values() if h[0].special() or len(h) > 1],
            [],
        ),
        {},
    )

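# A rough usage sketch for filterpatch() (hedged; 'rawpatch' and the
# always-matcher alias 'matchmod' are hypothetical stand-ins): headers
# come from parsepatch(), filterpatch() prompts hunk by hunk, and the
# selected chunks can be serialized back into a patch via write().
#
#   headers = parsepatch([rawpatch])
#   chunks, opts = filterpatch(ui, headers, matchmod.always(), b'record')
#   fp = stringio()
#   for c in chunks:
#       c.write(fp)
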
class hunk:
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith(b'\r\n'):
                    line = line[:-2] + b'\n'
                nlines.append(line)
            return nlines

        # Dummy object; it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

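    # For example (a sketch): normalize([b'a\r\n', b'b\n']) returns
    # [b'a\n', b'b\n'], so a CRLF hunk and its LF-normalized copy
    # compare equal line for line.
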
    def read_unified_hunk(self, lr):
        m = unidesc.match(self.desc)
        if not m:
            raise PatchParseError(_(b"bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        try:
            diffhelper.addlines(
                lr, self.hunk, self.lena, self.lenb, self.a, self.b
            )
        except error.ParseError as e:
            raise PatchParseError(_(b"bad hunk #%d: %s") % (self.number, e))
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Let's try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

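    # A worked example of read_unified_hunk's header parsing above (a
    # sketch; unidesc is the regex defined earlier in this module): for
    # a desc of b'@@ -10,3 +10,4 @@', m.groups() is
    # (b'10', b'3', b'10', b'4'), so starta/lena/startb/lenb become
    # 10, 3, 10, 4. A one-line range such as b'@@ -1 +1 @@' omits the
    # lengths, which then default to 1.
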
    def read_context_hunk(self, lr):
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchParseError(_(b"bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        for x in range(self.lena):
            l = lr.readline()
            if l.startswith(b'---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith(b'- ') or l.startswith(b'! '):
                u = b'-' + s
            elif l.startswith(b'  '):
                u = b' ' + s
            else:
                raise PatchParseError(
                    _(b"bad hunk #%d old text line %d") % (self.number, x)
                )
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith(br'\ '):
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchParseError(_(b"bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        for x in range(self.lenb):
            l = lr.readline()
            if l.startswith(br'\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) implementations out there which
                # behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith(b'+ ') or l.startswith(b'! '):
                u = b'+' + s
            elif l.startswith(b'  '):
                u = b' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchParseError(
                    _(b"bad hunk #%d old text line %d") % (self.number, x)
                )
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = b""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith(b'-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith(b'-') or x.startswith(b' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith(b'+') or x.startswith(b' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = b"@@ -%d,%d +%d,%d @@\n" % (
            self.starta,
            self.lena,
            self.startb,
            self.lenb,
        )
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        l = lr.readline()
        if l.startswith(br'\ '):
            diffhelper.fixnewline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of the 'old'
        # and 'new' lists. It checks the hunk to make sure only context
        # lines are removed, and then returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in range(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1].startswith(b' '):
                    top += 1
                else:
                    break
            if not toponly:
                for x in range(hlen - 1):
                    if self.hunk[hlen - bot - 1].startswith(b' '):
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top : len(old) - bot], new[top : len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart

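# A brief sketch of the fuzz trimming above (hypothetical values): with
# fuzz=2 and two context lines at each end of a hunk, _fuzzit() returns
# the old/new lists with those lines dropped and top == 2, and fuzzit()
# shifts the reported starts by the same amount so the shortened hunk
# still anchors at the right place.
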
class binhunk:
    """A binary patch file."""

    def __init__(self, lr, fname):
        self.text = None
        self.delta = False
        self.hunk = [b'GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        if self.delta:
            return [applybindelta(self.text, b''.join(lines))]
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            l = lr.readline()
            hunk.append(l)
            return l.rstrip(b'\r\n')

        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchParseError(
                    _(b'could not extract "%s" binary data') % self._fname
                )
            if line.startswith(b'literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith(b'delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            l = line[0:1]
            if l <= b'Z' and l >= b'A':
                l = ord(l) - ord(b'A') + 1
            else:
                l = ord(l) - ord(b'a') + 27
            try:
                dec.append(util.b85decode(line[1:])[:l])
            except ValueError as e:
                raise PatchParseError(
                    _(b'could not decode "%s" binary patch: %s')
                    % (self._fname, stringutil.forcebytestr(e))
                )
            line = getline(lr, self.hunk)
        text = zlib.decompress(b''.join(dec))
        if len(text) != size:
            raise PatchParseError(
                _(b'"%s" length is %d bytes, should be %d')
                % (self._fname, len(text), size)
            )
        self.text = text

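# In the 'GIT binary patch' payload parsed above, each data line leads
# with one letter giving the decoded byte count of that line: 'A'-'Z'
# mean 1-26 and 'a'-'z' mean 27-52. For example (a sketch), a line
# starting with b'E' carries 5 bytes once its base85 tail is decoded.
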
def parsefilename(str):
    # --- filename \t|space stuff
    s = str[4:].rstrip(b'\r\n')
    i = s.find(b'\t')
    if i < 0:
        i = s.find(b' ')
        if i < 0:
            return s
    return s[:i]

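# For instance (a sketch): parsefilename(b'--- a/foo\t2020-01-01\n')
# drops the b'--- ' prefix and the tab-separated timestamp and returns
# b'a/foo'; without a tab, the first space ends the filename instead.
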
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch([rawpatch])
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)

    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> from . import util
    >>> fp = util.stringio()
    >>> for c in reversedhunks:
    ...     c.write(fp)
    >>> fp.seek(0) or None
    >>> reversedpatch = fp.read()
    >>> print(pycompat.sysstr(reversedpatch))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -2,6 +1,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -6,3 +5,2 @@
     5
     d
    -lastline

    '''

    newhunks = []
    for c in hunks:
        if hasattr(c, 'reversehunk'):
            c = c.reversehunk()
        newhunks.append(c)
    return newhunks


def parsepatch(originalchunks, maxcontext=None):
    """patch -> [] of headers -> [] of hunks

    If maxcontext is not None, trim context lines if necessary.

    >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,8 +1,10 @@
    ...  1
    ...  2
    ... -3
    ...  4
    ...  5
    ...  6
    ... +6.1
    ... +6.2
    ...  7
    ...  8
    ... +9'''
    >>> out = util.stringio()
    >>> headers = parsepatch([rawpatch], maxcontext=1)
    >>> for header in headers:
    ...     header.write(out)
    ...     for hunk in header.hunks:
    ...         hunk.write(out)
    >>> print(pycompat.sysstr(out.getvalue()))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -2,3 +2,2 @@
     2
    -3
     4
    @@ -6,2 +5,4 @@
     6
    +6.1
    +6.2
     7
    @@ -8,1 +9,2 @@
     8
    +9
    """

    class parser:
        """patch parsing state machine"""

        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = b''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            self.addcontext([])
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            if self.hunk:
                h = recordhunk(
                    self.header,
                    self.fromline,
                    self.toline,
                    self.proc,
                    self.before,
                    self.hunk,
                    context,
                    maxcontext,
                )
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
            self.context = context

        def addhunk(self, hunk):
            if self.context:
                self.before = self.context
                self.context = []
            if self.hunk:
                self.addcontext([])
            self.hunk = hunk

        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass  # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        transitions = {
            b'file': {
                b'context': addcontext,
                b'file': newfile,
                b'hunk': addhunk,
                b'range': addrange,
            },
            b'context': {
                b'file': newfile,
                b'hunk': addhunk,
                b'range': addrange,
                b'other': addother,
            },
            b'hunk': {
                b'context': addcontext,
                b'file': newfile,
                b'range': addrange,
            },
            b'range': {b'context': addcontext, b'hunk': addhunk},
            b'other': {b'other': addother},
        }

    p = parser()
    fp = stringio()
    fp.write(b''.join(originalchunks))
    fp.seek(0)

    state = b'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchParseError(
                b'unhandled transition: %s -> %s' % (state, newstate)
            )
        state = newstate
    del fp
    return p.finished()

def pathtransform(path, strip, prefix):
    """turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform(b'a/b/c', 0, b'')
    ('', 'a/b/c')
    >>> pathtransform(b'   a/b/c   ', 0, b'')
    ('', '   a/b/c')
    >>> pathtransform(b'   a/b/c   ', 2, b'')
    ('a/b/', 'c')
    >>> pathtransform(b'a/b/c', 0, b'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(b'   a//b/c   ', 2, b'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform(b'a/b/c', 3, b'')
    Traceback (most recent call last):
    PatchApplicationError: unable to strip away 1 of 3 dirs from a/b/c
    """
    pathlen = len(path)
    i = 0
    if strip == 0:
        return b'', prefix + path.rstrip()
    count = strip
    while count > 0:
        i = path.find(b'/', i)
        if i == -1:
            raise PatchApplicationError(
                _(b"unable to strip away %d of %d dirs from %s")
                % (count, strip, path)
            )
        i += 1
        # consume '//' in the path
        while i < pathlen - 1 and path[i : i + 1] == b'/':
            i += 1
        count -= 1
    return path[:i].lstrip(), prefix + path[i:].rstrip()


def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    nulla = afile_orig == b"/dev/null"
    nullb = bfile_orig == b"/dev/null"
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[: afile.rfind(b'/') + 1]
    bbasedir = bfile[: bfile.rfind(b'/') + 1]
    if (
        missing
        and abasedir == bbasedir
        and afile.startswith(bfile)
        and hunk.starta == 0
        and hunk.lena == 0
    ):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = abase == bbase and bfile.startswith(afile)
    fname = None
    if not missing:
        if gooda and goodb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif gooda:
            fname = afile

    if not fname:
        if not nullb:
            if isbackup:
                fname = afile
            else:
                fname = bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchParseError(_(b"undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = b'ADD'
    elif remove:
        gp.op = b'DELETE'
    return gp

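# A quick note on the creation/deletion heuristics above (informal): a
# b'/dev/null' side combined with a zero-length @@ range marks the
# operation, e.g. afile_orig == b'/dev/null' with
# hunk.starta == hunk.lena == 0 produces gp.op == b'ADD'.
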
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        lines = [first]
        for line in iter(lr.readline, b''):
            if p(line):
                lines.append(line)
            else:
                lr.push(line)
                break
        return lines

    for line in iter(lr.readline, b''):
        if line.startswith(b'diff --git a/') or line.startswith(b'diff -r '):

            def notheader(line):
                s = line.split(None, 1)
                return not s or s[0] not in (b'---', b'diff')

            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith(b'---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield b'file', header
        elif line.startswith(b' '):
            cs = (b' ', b'\\')
            yield b'context', scanwhile(line, lambda l: l.startswith(cs))
        elif line.startswith((b'-', b'+')):
            cs = (b'-', b'+', b'\\')
            yield b'hunk', scanwhile(line, lambda l: l.startswith(cs))
        else:
            m = lines_re.match(line)
            if m:
                yield b'range', m.groups()
            else:
                yield b'other', line

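# A small sketch of consuming the event stream above (hypothetical
# input; 'patchtext' stands in for real unified-diff bytes):
#
#   fp = stringio()
#   fp.write(patchtext)
#   fp.seek(0)
#   for event, data in scanpatch(fp):
#       # event is b'file', b'range', b'hunk', b'context' or b'other'
#       ui.debug(b'%s\n' % event)
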
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is: the renamed 'a' could not be
    found, as it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands, allowing us to
    perform the copies ahead of time.
    """
    pos = 0
    try:
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        fp = stringio(lr.fp.read())
    gitlr = linereader(fp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    fp.seek(pos)
    return gitpatches


def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
      "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
      maps filenames to gitpatch records. Unique event.
    """
    afile = b""
    bfile = b""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    context = None
    lr = linereader(fp)

    for x in iter(lr.readline, b''):
        if state == BFILE and (
            (not context and x.startswith(b'@'))
            or (context is not False and x.startswith(b'***************'))
            or x.startswith(b'GIT binary patch')
        ):
            gp = None
            if gitpatches and gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
            if x.startswith(b'GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith(b'***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield b'file', (afile, bfile, h, gp and gp.copy() or None)
            yield b'hunk', h
        elif x.startswith(b'diff --git a/'):
            m = gitre.match(x.rstrip(b'\r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield b'git', [
                    g.copy() for g in gitpatches if g.op in (b'COPY', b'RENAME')
                ]
                gitpatches.reverse()
            afile = b'a/' + m.group(1)
            bfile = b'b/' + m.group(2)
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield b'file', (
                    b'a/' + gp.path,
                    b'b/' + gp.path,
                    None,
                    gp.copy(),
                )
            if not gitpatches:
                raise PatchParseError(
                    _(b'failed to synchronize metadata for "%s"') % afile[2:]
                )
            newfile = True
        elif x.startswith(b'---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith(b'+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith(b'***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith(b'---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith(b"***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    while gitpatches:
        gp = gitpatches.pop()
        yield b'file', (b'a/' + gp.path, b'b/' + gp.path, None, gp.copy())


def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c
    """

    def deltahead(binchunk):
        i = 0
        for c in pycompat.bytestr(binchunk):
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i

    out = b""
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i : i + 1])
        i += 1
        if cmd & 0x80:
            offset = 0
            size = 0
            if cmd & 0x01:
                offset = ord(binchunk[i : i + 1])
                i += 1
            if cmd & 0x02:
                offset |= ord(binchunk[i : i + 1]) << 8
                i += 1
            if cmd & 0x04:
                offset |= ord(binchunk[i : i + 1]) << 16
                i += 1
            if cmd & 0x08:
                offset |= ord(binchunk[i : i + 1]) << 24
                i += 1
            if cmd & 0x10:
                size = ord(binchunk[i : i + 1])
                i += 1
            if cmd & 0x20:
                size |= ord(binchunk[i : i + 1]) << 8
                i += 1
            if cmd & 0x40:
                size |= ord(binchunk[i : i + 1]) << 16
                i += 1
            if size == 0:
                size = 0x10000
            offset_end = offset + size
            out += data[offset:offset_end]
        elif cmd != 0:
            offset_end = i + cmd
            out += binchunk[i:offset_end]
            i += cmd
        else:
            raise PatchApplicationError(_(b'unexpected delta opcode 0'))
    return out

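# A worked example of the delta format handled above (a sketch, checked
# against the decoding loop rather than an official test vector): the
# header is two varints (source and target sizes), then opcodes. With
# data=b'ab', the delta b'\x02\x03\x91\x00\x02\x01X' decodes as: sizes
# 2 and 3; cmd 0x91 copies size=2 bytes from source offset=0 (b'ab');
# cmd 0x01 inserts one literal byte (b'X'); the result is b'abX'.
#
#   applybindelta(b'\x02\x03\x91\x00\x02\x01X', b'ab') == b'abX'
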
def applydiff(ui, fp, backend, store, strip=1, prefix=b'', eolmode=b'strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching, then normalized according to 'eolmode'.
    """
    return _applydiff(
        ui,
        fp,
        patchfile,
        backend,
        store,
        strip=strip,
        prefix=prefix,
        eolmode=eolmode,
    )


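# Illustrative sketch (not part of the original module): interpreting
# applydiff()'s tri-state return value; 'backend' and 'store' stand for any
# fsbackend/filestore pair supplied by the caller.
def _example_applydiff(ui, fp, backend, store):
    ret = applydiff(ui, fp, backend, store, strip=1)
    if ret == -1:
        ui.warn(b'some hunks were rejected\n')
    elif ret == 1:
        ui.status(b'applied, but with fuzz\n')
    else:
        ui.status(b'applied cleanly\n')

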
def _canonprefix(repo, prefix):
    if prefix:
        prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
        if prefix != b'':
            prefix += b'/'
    return prefix


def _applydiff(
    ui, fp, patcher, backend, store, strip=1, prefix=b'', eolmode=b'strict'
):
    prefix = _canonprefix(backend.repo, prefix)

    def pstrip(p):
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == b'hunk':
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == b'file':
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                gp = makepatchmeta(
                    backend, afile, bfile, first_hunk, strip, prefix
                )
            if gp.op == b'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                if gp.op == b'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in (b'RENAME', b'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    if data is None:
                        # This means that the old path does not exist
                        raise PatchApplicationError(
                            _(b"source file '%s' does not exist") % gp.oldpath
                        )
                if gp.mode:
                    mode = gp.mode
                    if gp.op == b'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = b''
                if data or mode:
                    if gp.op in (b'ADD', b'RENAME', b'COPY') and backend.exists(
                        gp.path
                    ):
                        raise PatchApplicationError(
                            _(
                                b"cannot create %s: destination "
                                b"already exists"
                            )
                            % gp.path
                        )
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store, eolmode=eolmode)
            except PatchError as inst:
                ui.warn(stringutil.forcebytestr(inst) + b'\n')
                current_file = None
                rejects += 1
                continue
        elif state == b'git':
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise error.Abort(_(b'unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err


def _externalpatch(ui, repo, patcher, patchname, strip, files, similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append(b'-d %s' % procutil.shellquote(cwd))
    cmd = b'%s %s -p%d < %s' % (
        patcher,
        b' '.join(args),
        strip,
        procutil.shellquote(patchname),
    )
    ui.debug(b'Using external patch tool: %s\n' % cmd)
    fp = procutil.popen(cmd, b'rb')
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + b'\n')
            if line.startswith(b'patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find(b'with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + b'\n')
                    printed_file = True
                ui.warn(line + b'\n')
            elif line.find(b'saving rejects to file') >= 0:
                ui.warn(line + b'\n')
            elif line.find(b'FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + b'\n')
                    printed_file = True
                ui.warn(line + b'\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
    code = fp.close()
    if code:
        raise PatchApplicationError(
            _(b"patch command failed: %s") % procutil.explainexit(code)
        )
    return fuzz


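# Illustrative sketch (not part of the original module): _externalpatch() is
# only reached when the user points the ui.patch config at an external tool,
# e.g. in hgrc (b'gpatch' is just an example tool name, not something this
# module defines):
#
#   [ui]
#   patch = gpatch
#
# The same thing done programmatically:
def _example_forceexternal(ui):
    ui.setconfig(b'ui', b'patch', b'gpatch')
    # patch() below now dispatches to _externalpatch(), and 'eolmode'
    # is ignored.

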
def patchbackend(
    ui, backend, patchobj, strip, prefix, files=None, eolmode=b'strict'
):
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config(b'patch', b'eol')
    if eolmode.lower() not in eolmodes:
        raise error.Abort(_(b'unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        fp = open(patchobj, b'rb')
    except TypeError:
        fp = patchobj
    try:
        ret = applydiff(
            ui, fp, backend, store, strip=strip, prefix=prefix, eolmode=eolmode
        )
    finally:
        if fp != patchobj:
            fp.close()
        files.update(backend.close())
        store.close()
    if ret < 0:
        raise PatchApplicationError(_(b'patch failed to apply'))
    return ret > 0


def internalpatch(
    ui,
    repo,
    patchobj,
    strip,
    prefix=b'',
    files=None,
    eolmode=b'strict',
    similarity=0,
):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    backend = workingbackend(ui, repo, similarity)
    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)


def patchrepo(
    ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode=b'strict'
):
    backend = repobackend(ui, repo, ctx, store)
    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)


def patch(
    ui,
    repo,
    patchname,
    strip=1,
    prefix=b'',
    files=None,
    eolmode=b'strict',
    similarity=0,
):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    patcher = ui.config(b'ui', b'patch')
    if files is None:
        files = set()
    if patcher:
        return _externalpatch(
            ui, repo, patcher, patchname, strip, files, similarity
        )
    return internalpatch(
        ui, repo, patchname, strip, prefix, files, eolmode, similarity
    )


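# Illustrative sketch (not part of the original module): applying a patch
# file to the working directory with the builtin engine and reporting fuzz.
def _example_patch(ui, repo):
    fuzz = patch(ui, repo, b'fix.patch', strip=1, eolmode=b'strict')
    if fuzz:
        ui.warn(b'patch applied with fuzz\n')

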
def changedfiles(ui, repo, patchpath, strip=1, prefix=b''):
    backend = fsbackend(ui, repo.root)
    prefix = _canonprefix(repo, prefix)
    with open(patchpath, b'rb') as fp:
        changed = set()
        for state, values in iterhunks(fp):
            if state == b'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
                    if gp.oldpath:
                        gp.oldpath = pathtransform(
                            gp.oldpath, strip - 1, prefix
                        )[1]
                else:
                    gp = makepatchmeta(
                        backend, afile, bfile, first_hunk, strip, prefix
                    )
                changed.add(gp.path)
                if gp.op == b'RENAME':
                    changed.add(gp.oldpath)
            elif state not in (b'hunk', b'git'):
                raise error.Abort(_(b'unsupported parser state: %s') % state)
        return changed


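# Illustrative sketch (not part of the original module): listing the files a
# patch would touch without applying anything.
def _example_changedfiles(ui, repo):
    for f in sorted(changedfiles(ui, repo, b'fix.patch')):
        ui.write(b'%s\n' % f)

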
class GitDiffRequired(Exception):
    pass


diffopts = diffutil.diffallopts
diffallopts = diffutil.diffallopts
difffeatureopts = diffutil.difffeatureopts


def diff(
    repo,
    node1=None,
    node2=None,
    match=None,
    changes=None,
    opts=None,
    losedatafn=None,
    pathfn=None,
    copy=None,
    copysourcematch=None,
    hunksfilterfn=None,
):
    """yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwargs) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of the current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.

    copy, if not empty, should contain mappings {dst@y: src@x} of copy
    information.

    if copysourcematch is not None, then copy sources will be filtered by this
    matcher

    hunksfilterfn, if not None, should be a function taking a filectx and
    hunks generator that may yield filtered hunks.
    """
    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    for fctx1, fctx2, hdr, hunks in diffhunks(
        repo,
        ctx1=ctx1,
        ctx2=ctx2,
        match=match,
        changes=changes,
        opts=opts,
        losedatafn=losedatafn,
        pathfn=pathfn,
        copy=copy,
        copysourcematch=copysourcematch,
    ):
        if hunksfilterfn is not None:
            # If the file has been removed, fctx2 is None; but this should
            # not occur here since we catch removed files early in
            # logcmdutil.getlinerangerevs() for 'hg log -L'.
            assert (
                fctx2 is not None
            ), b'fctx2 unexpectedly None in diff hunks filtering'
            hunks = hunksfilterfn(fctx2, hunks)
        text = b''.join(b''.join(hlines) for hrange, hlines in hunks)
        if hdr and (text or len(hdr) > 1):
            yield b'\n'.join(hdr) + b'\n'
        if text:
            yield text


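# Illustrative sketch (not part of the original module): materializing the
# diff of the working directory against its first parent; each yielded chunk
# is bytes (a joined header block or hunk text).
def _example_diff(ui, repo):
    ui.write(b''.join(diff(repo)))

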
def diffhunks(
    repo,
    ctx1,
    ctx2,
    match=None,
    changes=None,
    opts=None,
    losedatafn=None,
    pathfn=None,
    copy=None,
    copysourcematch=None,
):
    """Yield diff of changes to files in the form of
    (`fctx1`, `fctx2`, `header`, `hunks`) tuples, where `header` is a list of
    diff headers and `hunks` is an iterable of (`hunkrange`, `hunklines`)
    tuples.

    See diff() for the meaning of parameters.
    """

    if opts is None:
        opts = mdiff.defaultopts

    def lrugetfilectx():
        cache = {}
        order = collections.deque()

        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx

        return getfilectx

    getfilectx = lrugetfilectx()

    if not changes:
        changes = ctx1.status(ctx2, match=match)
    if isinstance(changes, list):
        modified, added, removed = changes[:3]
    else:
        modified, added, removed = (
            changes.modified,
            changes.added,
            changes.removed,
        )

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    if copy is None:
        copy = {}
        if opts.git or opts.upgrade:
            copy = copies.pathcopies(ctx1, ctx2, match=match)

    if copysourcematch:
        # filter out copies where source side isn't inside the matcher
        # (copies.pathcopies() already filtered out the destination)
        copy = {dst: src for dst, src in copy.items() if copysourcematch(src)}

    modifiedset = set(modified)
    addedset = set(added)
    removedset = set(removed)
    for f in modified:
        if f not in ctx1:
            # Fix up added, since merged-in additions appear as
            # modifications during merges
            modifiedset.remove(f)
            addedset.add(f)
    for f in removed:
        if f not in ctx1:
            # Merged-in additions that are then removed are reported as removed.
            # They are not in ctx1, so we don't want to show them in the diff.
            removedset.remove(f)
    modified = sorted(modifiedset)
    added = sorted(addedset)
    removed = sorted(removedset)
    for dst, src in list(copy.items()):
        if src not in ctx1:
            # Files merged in during a merge and then copied/renamed are
            # reported as copies. We want to show them in the diff as additions.
            del copy[dst]

    prefetchmatch = scmutil.matchfiles(
        repo, list(modifiedset | addedset | removedset)
    )
    revmatches = [
        (ctx1.rev(), prefetchmatch),
        (ctx2.rev(), prefetchmatch),
    ]
    scmutil.prefetchfiles(repo, revmatches)

    def difffn(opts, losedata):
        return trydiff(
            repo,
            revs,
            ctx1,
            ctx2,
            modified,
            added,
            removed,
            copy,
            getfilectx,
            opts,
            losedata,
            pathfn,
        )

    if opts.upgrade and not opts.git:
        try:

            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired

            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)


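# Illustrative sketch (not part of the original module): consuming diffhunks()
# directly; fctx1 is None for added files and fctx2 is None for removed ones.
def _example_diffhunks(ui, repo):
    for fctx1, fctx2, header, hunks in diffhunks(repo, repo[b'.'], repo[None]):
        if header:
            ui.write(b'\n'.join(header) + b'\n')
        for hunkrange, hunklines in hunks:
            ui.write(b''.join(hunklines))

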
def diffsinglehunk(hunklines):
    """yield tokens for a list of lines in a single hunk"""
    for line in hunklines:
        # chomp
        chompline = line.rstrip(b'\r\n')
        # highlight tabs and trailing whitespace
        stripline = chompline.rstrip()
        if line.startswith(b'-'):
            label = b'diff.deleted'
        elif line.startswith(b'+'):
            label = b'diff.inserted'
        else:
            raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
        for token in tabsplitter.findall(stripline):
            if token.startswith(b'\t'):
                yield (token, b'diff.tab')
            else:
                yield (token, label)

        if chompline != stripline:
            yield (chompline[len(stripline) :], b'diff.trailingwhitespace')
        if chompline != line:
            yield (line[len(chompline) :], b'')


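# Illustrative sketch (not part of the original module): the (token, label)
# stream for a two-line hunk, assuming the module-level tabsplitter regex
# splits runs of tabs from non-tab text; the trailing space on the removed
# line becomes its own labelled token.
def _example_diffsinglehunk():
    assert list(diffsinglehunk([b'-old \n', b'+new\n'])) == [
        (b'-old', b'diff.deleted'),
        (b' ', b'diff.trailingwhitespace'),
        (b'\n', b''),
        (b'+new', b'diff.inserted'),
        (b'\n', b''),
    ]

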
def diffsinglehunkinline(hunklines):
    """yield tokens for a list of lines in a single hunk, with inline colors"""
    # prepare deleted and inserted content
    a = bytearray()
    b = bytearray()
    for line in hunklines:
        if line[0:1] == b'-':
            a += line[1:]
        elif line[0:1] == b'+':
            b += line[1:]
        else:
            raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
    # fast path: if either side is empty, use diffsinglehunk
    if not a or not b:
        for t in diffsinglehunk(hunklines):
            yield t
        return
    # re-split the content into words
    al = wordsplitter.findall(bytes(a))
    bl = wordsplitter.findall(bytes(b))
    # re-arrange the words to lines since the diff algorithm is line-based
    aln = [s if s == b'\n' else s + b'\n' for s in al]
    bln = [s if s == b'\n' else s + b'\n' for s in bl]
    an = b''.join(aln)
    bn = b''.join(bln)
    # run the diff algorithm, prepare atokens and btokens
    atokens = []
    btokens = []
    blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
    for (a1, a2, b1, b2), btype in blocks:
        changed = btype == b'!'
        for token in mdiff.splitnewlines(b''.join(al[a1:a2])):
            atokens.append((changed, token))
        for token in mdiff.splitnewlines(b''.join(bl[b1:b2])):
            btokens.append((changed, token))

    # yield deleted tokens, then inserted ones
    for prefix, label, tokens in [
        (b'-', b'diff.deleted', atokens),
        (b'+', b'diff.inserted', btokens),
    ]:
        nextisnewline = True
        for changed, token in tokens:
            if nextisnewline:
                yield (prefix, label)
                nextisnewline = False
            # special handling of line end
            isendofline = token.endswith(b'\n')
            if isendofline:
                chomp = token[:-1]  # chomp
                if chomp.endswith(b'\r'):
                    chomp = chomp[:-1]
                endofline = token[len(chomp) :]
                token = chomp.rstrip()  # detect spaces at the end
                endspaces = chomp[len(token) :]
            # scan tabs
            for maybetab in tabsplitter.findall(token):
                if b'\t' == maybetab[0:1]:
                    currentlabel = b'diff.tab'
                else:
                    if changed:
                        currentlabel = label + b'.changed'
                    else:
                        currentlabel = label + b'.unchanged'
                yield (maybetab, currentlabel)
            if isendofline:
                if endspaces:
                    yield (endspaces, b'diff.trailingwhitespace')
                yield (endofline, b'')
                nextisnewline = True


def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    if kw.get('opts') and kw['opts'].worddiff:
        dodiffhunk = diffsinglehunkinline
    else:
        dodiffhunk = diffsinglehunk
    headprefixes = [
        (b'diff', b'diff.diffline'),
        (b'copy', b'diff.extended'),
        (b'rename', b'diff.extended'),
        (b'old', b'diff.extended'),
        (b'new', b'diff.extended'),
        (b'deleted', b'diff.extended'),
        (b'index', b'diff.extended'),
        (b'similarity', b'diff.extended'),
        (b'---', b'diff.file_a'),
        (b'+++', b'diff.file_b'),
    ]
    textprefixes = [
        (b'@', b'diff.hunk'),
        # - and + are handled by diffsinglehunk
    ]
    head = False

    # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
    hunkbuffer = []

    def consumehunkbuffer():
        if hunkbuffer:
            for token in dodiffhunk(hunkbuffer):
                yield token
            hunkbuffer[:] = []

    for chunk in func(*args, **kw):
        lines = chunk.split(b'\n')
        linecount = len(lines)
        for i, line in enumerate(lines):
            if head:
                if line.startswith(b'@'):
                    head = False
            else:
                if line and not line.startswith(
                    (b' ', b'+', b'-', b'@', b'\\')
                ):
                    head = True
            diffline = False
            if not head and line and line.startswith((b'+', b'-')):
                diffline = True

            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            if diffline:
                # buffered
                bufferedline = line
                if i + 1 < linecount:
                    bufferedline += b"\n"
                hunkbuffer.append(bufferedline)
            else:
                # unbuffered
                for token in consumehunkbuffer():
                    yield token
                stripline = line.rstrip()
                for prefix, label in prefixes:
                    if stripline.startswith(prefix):
                        yield (stripline, label)
                        if line != stripline:
                            yield (
                                line[len(stripline) :],
                                b'diff.trailingwhitespace',
                            )
                        break
                else:
                    yield (line, b'')
                if i + 1 < linecount:
                    yield (b'\n', b'')
    for token in consumehunkbuffer():
        yield token


def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    return difflabel(diff, *args, **kw)


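# Illustrative sketch (not part of the original module): streaming a labelled
# diff to the terminal; ui.write() routes the label through the color
# machinery.
def _example_diffui(ui, repo):
    for output, label in diffui(repo):
        ui.write(output, label=label)

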
def _filepairs(modified, added, removed, copy, opts):
    """generates tuples (f1, f2, copyop), where f1 is the name of the file
    before and f2 is the name after. For added files, f1 will be None,
    and for removed files, f2 will be None. copyop may be set to None, 'copy'
    or 'rename' (the latter two only if opts.git is set)."""
    gone = set()

    copyto = {v: k for k, v in copy.items()}

    addedset, removedset = set(added), set(removed)

    for f in sorted(modified + added + removed):
        copyop = None
        f1, f2 = f, f
        if f in addedset:
            f1 = None
            if f in copy:
                if opts.git:
                    f1 = copy[f]
                    if f1 in removedset and f1 not in gone:
                        copyop = b'rename'
                        gone.add(f1)
                    else:
                        copyop = b'copy'
        elif f in removedset:
            f2 = None
            if opts.git:
                # have we already reported a copy above?
                if (
                    f in copyto
                    and copyto[f] in addedset
                    and copy[copyto[f]] == f
                ):
                    continue
        yield f1, f2, copyop


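# Illustrative sketch (not part of the original module): with git diffs
# enabled, a rename collapses into a single pair and the matching removal is
# skipped; 'opts' stands for any diff options object with a truthy .git.
def _example_filepairs(opts):
    pairs = list(_filepairs([], [b'new'], [b'old'], {b'new': b'old'}, opts))
    assert pairs == [(b'old', b'new', b'rename')]

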
def _gitindex(text):
    if not text:
        text = b""
    l = len(text)
    s = hashutil.sha1(b'blob %d\0' % l)
    s.update(text)
    return hex(s.digest())


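# Illustrative sketch (not part of the original module): this mirrors git's
# blob hashing (SHA-1 over "blob <len>\0<content>"), so the well-known id of
# the empty blob falls out.
def _example_gitindex():
    assert _gitindex(b'') == b'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'

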
_gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}


def trydiff(
    repo,
    revs,
    ctx1,
    ctx2,
    modified,
    added,
    removed,
    copy,
    getfilectx,
    opts,
    losedatafn,
    pathfn,
):
    """given input data, generate a diff and yield it in blocks

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    pathfn is applied to every path in the diff output.
    """

    if opts.noprefix:
        aprefix = bprefix = b''
    else:
        aprefix = b'a/'
        bprefix = b'b/'

    def diffline(f, revs):
        revinfo = b' '.join([b"-r %s" % rev for rev in revs])
        return b'diff %s %s' % (revinfo, f)

    def isempty(fctx):
        return fctx is None or fctx.size() == 0

    date1 = dateutil.datestr(ctx1.date())
    date2 = dateutil.datestr(ctx2.date())

    if not pathfn:
        pathfn = lambda f: f

    for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        fctx1 = None
        fctx2 = None
        flag1 = None
        flag2 = None
        if f1:
            fctx1 = getfilectx(f1, ctx1)
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            fctx2 = getfilectx(f2, ctx2)
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        # if binary is True, output "summary" or "base85", but not "text diff"
        if opts.text:
            binary = False
        else:
            binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)

        if losedatafn and not opts.git:
            if (
                binary
                or
                # copy/rename
                f2 in copy
                or
                # empty file creation
                (not f1 and isempty(fctx2))
                or
                # empty file deletion
                (isempty(fctx1) and not f2)
                or
                # create with flags
                (not f1 and flag2)
                or
                # change flags
                (f1 and f2 and flag1 != flag2)
            ):
                losedatafn(f2 or f1)

        path1 = pathfn(f1 or f2)
        path2 = pathfn(f2 or f1)
        header = []
        if opts.git:
            header.append(
                b'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
            )
            if not f1:  # added
                header.append(b'new file mode %s' % _gitmode[flag2])
            elif not f2:  # removed
                header.append(b'deleted file mode %s' % _gitmode[flag1])
            else:  # modified/copied/renamed
                mode1, mode2 = _gitmode[flag1], _gitmode[flag2]
                if mode1 != mode2:
                    header.append(b'old mode %s' % mode1)
                    header.append(b'new mode %s' % mode2)
                if copyop is not None:
                    if opts.showsimilarity:
                        sim = similar.score(ctx1[path1], ctx2[path2]) * 100
                        header.append(b'similarity index %d%%' % sim)
                    header.append(b'%s from %s' % (copyop, path1))
                    header.append(b'%s to %s' % (copyop, path2))
        elif revs:
            header.append(diffline(path1, revs))

        # fctx.is  | diffopts                | what to   | is fctx.data()
        # binary() | text nobinary git index | output?   | outputted?
        # ---------|-------------------------|-----------|---------------
        # yes      | no   no       no   *    | summary   | no
        # yes      | no   no       yes  *    | base85    | yes
        # yes      | no   yes      no   *    | summary   | no
        # yes      | no   yes      yes  0    | summary   | no
        # yes      | no   yes      yes  >0   | summary   | semi [1]
        # yes      | yes  *        *    *    | text diff | yes
        # no       | *    *        *    *    | text diff | yes
        # [1]: hash(fctx.data()) is output, so fctx.data() cannot be faked
        if binary and (
            not opts.git or (opts.git and opts.nobinary and not opts.index)
        ):
            # fast path: no binary content will be displayed, content1 and
            # content2 are only used for an equivalence test. cmp() could
            # have a fast path.
            if fctx1 is not None:
                content1 = b'\0'
            if fctx2 is not None:
                if fctx1 is not None and not fctx1.cmp(fctx2):
                    content2 = b'\0'  # not different
                else:
                    content2 = b'\0\0'
        else:
            # normal path: load contents
            if fctx1 is not None:
                content1 = fctx1.data()
            if fctx2 is not None:
                content2 = fctx2.data()

        data1 = (ctx1, fctx1, path1, flag1, content1, date1)
        data2 = (ctx2, fctx2, path2, flag2, content2, date2)
        yield diffcontent(data1, data2, header, binary, opts)


def diffcontent(data1, data2, header, binary, opts):
    """diffs two versions of a file.

    data1 and data2 are tuples containing:

    * ctx: changeset for the file
    * fctx: file context for that file
    * path1: name of the file
    * flag: flags of the file
    * content: full content of the file (can be null in case of binary)
    * date: date of the changeset

    header: the patch header
    binary: whether any of the versions of the file is binary or not
    opts: user passed options

    It exists as a separate function so that extensions like extdiff can wrap
    it and use the file content directly.
    """

    ctx1, fctx1, path1, flag1, content1, date1 = data1
    ctx2, fctx2, path2, flag2, content2, date2 = data2
    index1 = _gitindex(content1) if path1 in ctx1 else sha1nodeconstants.nullhex
    index2 = _gitindex(content2) if path2 in ctx2 else sha1nodeconstants.nullhex
    if binary and opts.git and not opts.nobinary:
        text = mdiff.b85diff(content1, content2)
        if text:
            header.append(b'index %s..%s' % (index1, index2))
        hunks = ((None, [text]),)
    else:
        if opts.git and opts.index > 0:
            flag = flag1
            if flag is None:
                flag = flag2
            header.append(
                b'index %s..%s %s'
                % (
                    index1[0 : opts.index],
                    index2[0 : opts.index],
                    _gitmode[flag],
                )
            )

        uheaders, hunks = mdiff.unidiff(
            content1,
            date1,
            content2,
            date2,
            path1,
            path2,
            binary=binary,
            opts=opts,
        )
        header.extend(uheaders)
    return fctx1, fctx2, header, hunks


def diffstatsum(stats):
    maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
    for f, a, r, b in stats:
        maxfile = max(maxfile, encoding.colwidth(f))
        maxtotal = max(maxtotal, a + r)
        addtotal += a
        removetotal += r
        binary = binary or b

    return maxfile, maxtotal, addtotal, removetotal, binary


def diffstatdata(lines):
    diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        if filename:
            results.append((filename, adds, removes, isbinary))

    # inheader is used to track if a line is in the
    # header portion of the diff. This helps properly account
    # for lines that start with '--' or '++'
    inheader = False

    for line in lines:
        if line.startswith(b'diff'):
            addresult()
            # starting a new file diff
            # set numbers to 0 and reset inheader
            inheader = True
            adds, removes, isbinary = 0, 0, False
            if line.startswith(b'diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith(b'diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith(b'@@'):
            inheader = False
        elif line.startswith(b'+') and not inheader:
            adds += 1
        elif line.startswith(b'-') and not inheader:
            removes += 1
        elif line.startswith(b'GIT binary patch') or line.startswith(
            b'Binary file'
        ):
            isbinary = True
        elif line.startswith(b'rename from'):
            filename = line[12:]
        elif line.startswith(b'rename to'):
            filename += b' => %s' % line[10:]
    addresult()
    return results


3190 def diffstat(lines, width=80):
3190 def diffstat(lines, width=80):
3191 output = []
3191 output = []
3192 stats = diffstatdata(lines)
3192 stats = diffstatdata(lines)
3193 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
3193 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
3194
3194
3195 countwidth = len(str(maxtotal))
3195 countwidth = len(str(maxtotal))
3196 if hasbinary and countwidth < 3:
3196 if hasbinary and countwidth < 3:
3197 countwidth = 3
3197 countwidth = 3
3198 graphwidth = width - countwidth - maxname - 6
3198 graphwidth = width - countwidth - maxname - 6
3199 if graphwidth < 10:
3199 if graphwidth < 10:
3200 graphwidth = 10
3200 graphwidth = 10
3201
3201
3202 def scale(i):
3202 def scale(i):
3203 if maxtotal <= graphwidth:
3203 if maxtotal <= graphwidth:
3204 return i
3204 return i
3205 # If diffstat runs out of room it doesn't print anything,
3205 # If diffstat runs out of room it doesn't print anything,
3206 # which isn't very useful, so always print at least one + or -
3206 # which isn't very useful, so always print at least one + or -
3207 # if there were at least some changes.
3207 # if there were at least some changes.
3208 return max(i * graphwidth // maxtotal, int(bool(i)))
3208 return max(i * graphwidth // maxtotal, int(bool(i)))
3209
3209
3210 for filename, adds, removes, isbinary in stats:
3210 for filename, adds, removes, isbinary in stats:
3211 if isbinary:
3211 if isbinary:
3212 count = b'Bin'
3212 count = b'Bin'
3213 else:
3213 else:
3214 count = b'%d' % (adds + removes)
3214 count = b'%d' % (adds + removes)
3215 pluses = b'+' * scale(adds)
3215 pluses = b'+' * scale(adds)
3216 minuses = b'-' * scale(removes)
3216 minuses = b'-' * scale(removes)
3217 output.append(
3217 output.append(
3218 b' %s%s | %*s %s%s\n'
3218 b' %s%s | %*s %s%s\n'
3219 % (
3219 % (
3220 filename,
3220 filename,
3221 b' ' * (maxname - encoding.colwidth(filename)),
3221 b' ' * (maxname - encoding.colwidth(filename)),
3222 countwidth,
3222 countwidth,
3223 count,
3223 count,
3224 pluses,
3224 pluses,
3225 minuses,
3225 minuses,
3226 )
3226 )
3227 )
3227 )
3228
3228
3229 if stats:
3229 if stats:
3230 output.append(
3230 output.append(
3231 _(b' %d files changed, %d insertions(+), %d deletions(-)\n')
3231 _(b' %d files changed, %d insertions(+), %d deletions(-)\n')
3232 % (len(stats), totaladds, totalremoves)
3232 % (len(stats), totaladds, totalremoves)
3233 )
3233 )
3234
3234
3235 return b''.join(output)
3235 return b''.join(output)
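
The width arithmetic above is easiest to see with concrete numbers; a worked example (all values illustrative):

# width=80 output, 20-char filenames, 3-digit counts:
width, countwidth, maxname = 80, 3, 20
graphwidth = max(width - countwidth - maxname - 6, 10)        # 51 columns
maxtotal, adds = 400, 13
# 13 * 51 // 400 == 1 and int(bool(13)) == 1: a file with a handful of
# added lines still gets one '+' instead of vanishing from the graph.
print(max(adds * graphwidth // maxtotal, int(bool(adds))))    # 1
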
3236
3236
3237
3237
3238 def diffstatui(*args, **kw):
3238 def diffstatui(*args, **kw):
3239 """like diffstat(), but yields 2-tuples of (output, label) for
3239 """like diffstat(), but yields 2-tuples of (output, label) for
3240 ui.write()
3240 ui.write()
3241 """
3241 """
3242
3242
3243 for line in diffstat(*args, **kw).splitlines():
3243 for line in diffstat(*args, **kw).splitlines():
3244 if line and line[-1] in b'+-':
3244 if line and line[-1] in b'+-':
3245 name, graph = line.rsplit(b' ', 1)
3245 name, graph = line.rsplit(b' ', 1)
3246 yield (name + b' ', b'')
3246 yield (name + b' ', b'')
3247 m = re.search(br'\++', graph)
3247 m = re.search(br'\++', graph)
3248 if m:
3248 if m:
3249 yield (m.group(0), b'diffstat.inserted')
3249 yield (m.group(0), b'diffstat.inserted')
3250 m = re.search(br'-+', graph)
3250 m = re.search(br'-+', graph)
3251 if m:
3251 if m:
3252 yield (m.group(0), b'diffstat.deleted')
3252 yield (m.group(0), b'diffstat.deleted')
3253 else:
3253 else:
3254 yield (line, b'')
3254 yield (line, b'')
3255 yield (b'\n', b'')
3255 yield (b'\n', b'')
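
A hedged sketch of how the labeled output looks in practice, assuming diffstatui() is importable from mercurial.patch; the labels are what color configuration hooks into:

from mercurial.patch import diffstatui

lines = b'diff --git a/foo.txt b/foo.txt\n@@ -1 +1 @@\n-old\n+new\n'.splitlines()
for text, label in diffstatui(lines):
    print(repr(text), '->', label or '(plain)')
# the run of '+' arrives labeled 'diffstat.inserted' and the run of '-'
# labeled 'diffstat.deleted'; everything else is unlabeled.
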
@@ -1,811 +1,812
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import errno
9 import errno
10 import fcntl
10 import fcntl
11 import getpass
11 import getpass
12 import grp
12 import grp
13 import os
13 import os
14 import pwd
14 import pwd
15 import re
15 import re
16 import select
16 import select
17 import stat
17 import stat
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import typing
20 import typing
21 import unicodedata
21 import unicodedata
22
22
23 from typing import (
23 from typing import (
24 Any,
24 Any,
25 AnyStr,
25 AnyStr,
26 Callable,
26 Iterable,
27 Iterable,
27 Iterator,
28 Iterator,
28 List,
29 List,
29 Match,
30 Match,
30 NoReturn,
31 NoReturn,
31 Optional,
32 Optional,
32 Sequence,
33 Sequence,
33 Tuple,
34 Tuple,
34 Union,
35 Union,
35 )
36 )
36
37
37 from .i18n import _
38 from .i18n import _
38 from .pycompat import (
39 from .pycompat import (
39 open,
40 open,
40 )
41 )
41 from . import (
42 from . import (
42 encoding,
43 encoding,
43 error,
44 error,
44 policy,
45 policy,
45 pycompat,
46 pycompat,
46 )
47 )
47
48
48 osutil = policy.importmod('osutil')
49 osutil = policy.importmod('osutil')
49
50
50 normpath = os.path.normpath
51 normpath = os.path.normpath
51 samestat = os.path.samestat
52 samestat = os.path.samestat
52 abspath = os.path.abspath # re-exports
53 abspath = os.path.abspath # re-exports
53
54
54 try:
55 try:
55 oslink = os.link
56 oslink = os.link
56 except AttributeError:
57 except AttributeError:
57 # Some platforms build Python without os.link on systems that are
58 # Some platforms build Python without os.link on systems that are
58 # vaguely unix-like but don't have hardlink support. For those
59 # vaguely unix-like but don't have hardlink support. For those
59 # poor souls, just say we tried and that it failed so we fall back
60 # poor souls, just say we tried and that it failed so we fall back
60 # to copies.
61 # to copies.
61 def oslink(src: bytes, dst: bytes) -> NoReturn:
62 def oslink(src: bytes, dst: bytes) -> NoReturn:
62 raise OSError(
63 raise OSError(
63 errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
64 errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
64 )
65 )
65
66
66
67
67 readlink = os.readlink
68 readlink = os.readlink
68 unlink = os.unlink
69 unlink = os.unlink
69 rename = os.rename
70 rename = os.rename
70 removedirs = os.removedirs
71 removedirs = os.removedirs
71
72
72 if typing.TYPE_CHECKING:
73 if typing.TYPE_CHECKING:
73
74
74 def normpath(path: bytes) -> bytes:
75 def normpath(path: bytes) -> bytes:
75 raise NotImplementedError
76 raise NotImplementedError
76
77
77 def abspath(path: AnyStr) -> AnyStr:
78 def abspath(path: AnyStr) -> AnyStr:
78 raise NotImplementedError
79 raise NotImplementedError
79
80
80 def oslink(src: bytes, dst: bytes) -> None:
81 def oslink(src: bytes, dst: bytes) -> None:
81 raise NotImplementedError
82 raise NotImplementedError
82
83
83 def readlink(path: bytes) -> bytes:
84 def readlink(path: bytes) -> bytes:
84 raise NotImplementedError
85 raise NotImplementedError
85
86
86 def unlink(path: bytes) -> None:
87 def unlink(path: bytes) -> None:
87 raise NotImplementedError
88 raise NotImplementedError
88
89
89 def rename(src: bytes, dst: bytes) -> None:
90 def rename(src: bytes, dst: bytes) -> None:
90 raise NotImplementedError
91 raise NotImplementedError
91
92
92 def removedirs(name: bytes) -> None:
93 def removedirs(name: bytes) -> None:
93 raise NotImplementedError
94 raise NotImplementedError
94
95
95
96
96 expandglobs: bool = False
97 expandglobs: bool = False
97
98
98 umask: int = os.umask(0)
99 umask: int = os.umask(0)
99 os.umask(umask)
100 os.umask(umask)
100
101
101 posixfile = open
102 posixfile = open
102
103
103
104
104 def split(p: bytes) -> Tuple[bytes, bytes]:
105 def split(p: bytes) -> Tuple[bytes, bytes]:
105 """Same as posixpath.split, but faster
106 """Same as posixpath.split, but faster
106
107
107 >>> import posixpath
108 >>> import posixpath
108 >>> for f in [b'/absolute/path/to/file',
109 >>> for f in [b'/absolute/path/to/file',
109 ... b'relative/path/to/file',
110 ... b'relative/path/to/file',
110 ... b'file_alone',
111 ... b'file_alone',
111 ... b'path/to/directory/',
112 ... b'path/to/directory/',
112 ... b'/multiple/path//separators',
113 ... b'/multiple/path//separators',
113 ... b'/file_at_root',
114 ... b'/file_at_root',
114 ... b'///multiple_leading_separators_at_root',
115 ... b'///multiple_leading_separators_at_root',
115 ... b'']:
116 ... b'']:
116 ... assert split(f) == posixpath.split(f), f
117 ... assert split(f) == posixpath.split(f), f
117 """
118 """
118 ht = p.rsplit(b'/', 1)
119 ht = p.rsplit(b'/', 1)
119 if len(ht) == 1:
120 if len(ht) == 1:
120 return b'', p
121 return b'', p
121 nh = ht[0].rstrip(b'/')
122 nh = ht[0].rstrip(b'/')
122 if nh:
123 if nh:
123 return nh, ht[1]
124 return nh, ht[1]
124 return ht[0] + b'/', ht[1]
125 return ht[0] + b'/', ht[1]
125
126
126
127
127 def openhardlinks() -> bool:
128 def openhardlinks() -> bool:
128 '''return true if it is safe to hold open file handles to hardlinks'''
129 '''return true if it is safe to hold open file handles to hardlinks'''
129 return True
130 return True
130
131
131
132
132 def nlinks(name: bytes) -> int:
133 def nlinks(name: bytes) -> int:
133 '''return number of hardlinks for the given file'''
134 '''return number of hardlinks for the given file'''
134 return os.lstat(name).st_nlink
135 return os.lstat(name).st_nlink
135
136
136
137
137 def parsepatchoutput(output_line: bytes) -> bytes:
138 def parsepatchoutput(output_line: bytes) -> bytes:
138 """parses the output produced by patch and returns the filename"""
139 """parses the output produced by patch and returns the filename"""
139 pf = output_line[14:]
140 pf = output_line[14:]
140 if pycompat.sysplatform == b'OpenVMS':
141 if pycompat.sysplatform == b'OpenVMS':
141 if pf[0:1] == b'`':
142 if pf[0:1] == b'`':
142 pf = pf[1:-1] # Remove the quotes
143 pf = pf[1:-1] # Remove the quotes
143 else:
144 else:
144 if pf.startswith(b"'") and pf.endswith(b"'") and b" " in pf:
145 if pf.startswith(b"'") and pf.endswith(b"'") and b" " in pf:
145 pf = pf[1:-1] # Remove the quotes
146 pf = pf[1:-1] # Remove the quotes
146 return pf
147 return pf
147
148
148
149
149 def sshargs(
150 def sshargs(
150 sshcmd: bytes, host: bytes, user: Optional[bytes], port: Optional[bytes]
151 sshcmd: bytes, host: bytes, user: Optional[bytes], port: Optional[bytes]
151 ) -> bytes:
152 ) -> bytes:
152 '''Build argument list for ssh'''
153 '''Build argument list for ssh'''
153 args = user and (b"%s@%s" % (user, host)) or host
154 args = user and (b"%s@%s" % (user, host)) or host
154 if b'-' in args[:1]:
155 if b'-' in args[:1]:
155 raise error.Abort(
156 raise error.Abort(
156 _(b'illegal ssh hostname or username starting with -: %s') % args
157 _(b'illegal ssh hostname or username starting with -: %s') % args
157 )
158 )
158 args = shellquote(args)
159 args = shellquote(args)
159 if port:
160 if port:
160 args = b'-p %s %s' % (shellquote(port), args)
161 args = b'-p %s %s' % (shellquote(port), args)
161 return args
162 return args
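
Illustrative expectations for sshargs(), assuming mercurial.posix is importable; note that shellquote() (defined later in this file) wraps the user@host pair because '@' falls outside its safe character class:

from mercurial.posix import sshargs

print(sshargs(b'ssh', b'example.com', None, None))
# b'example.com'
print(sshargs(b'ssh', b'example.com', b'user', b'2222'))
# b"-p 2222 'user@example.com'"
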
162
163
163
164
164 def isexec(f: bytes) -> bool:
165 def isexec(f: bytes) -> bool:
165 """check whether a file is executable"""
166 """check whether a file is executable"""
166 return os.lstat(f).st_mode & 0o100 != 0
167 return os.lstat(f).st_mode & 0o100 != 0
167
168
168
169
169 def setflags(f: bytes, l: bool, x: bool) -> None:
170 def setflags(f: bytes, l: bool, x: bool) -> None:
170 st = os.lstat(f)
171 st = os.lstat(f)
171 s = st.st_mode
172 s = st.st_mode
172 if l:
173 if l:
173 if not stat.S_ISLNK(s):
174 if not stat.S_ISLNK(s):
174 # switch file to link
175 # switch file to link
175 with open(f, b'rb') as fp:
176 with open(f, b'rb') as fp:
176 data = fp.read()
177 data = fp.read()
177 unlink(f)
178 unlink(f)
178 try:
179 try:
179 os.symlink(data, f)
180 os.symlink(data, f)
180 except OSError:
181 except OSError:
181 # failed to make a link, rewrite file
182 # failed to make a link, rewrite file
182 with open(f, b"wb") as fp:
183 with open(f, b"wb") as fp:
183 fp.write(data)
184 fp.write(data)
184
185
185 # no chmod needed at this point
186 # no chmod needed at this point
186 return
187 return
187 if stat.S_ISLNK(s):
188 if stat.S_ISLNK(s):
188 # switch link to file
189 # switch link to file
189 data = os.readlink(f)
190 data = os.readlink(f)
190 unlink(f)
191 unlink(f)
191 with open(f, b"wb") as fp:
192 with open(f, b"wb") as fp:
192 fp.write(data)
193 fp.write(data)
193 s = 0o666 & ~umask # avoid restatting for chmod
194 s = 0o666 & ~umask # avoid restatting for chmod
194
195
195 sx = s & 0o100
196 sx = s & 0o100
196 if st.st_nlink > 1 and bool(x) != bool(sx):
197 if st.st_nlink > 1 and bool(x) != bool(sx):
197 # the file is a hardlink, break it
198 # the file is a hardlink, break it
198 with open(f, b"rb") as fp:
199 with open(f, b"rb") as fp:
199 data = fp.read()
200 data = fp.read()
200 unlink(f)
201 unlink(f)
201 with open(f, b"wb") as fp:
202 with open(f, b"wb") as fp:
202 fp.write(data)
203 fp.write(data)
203
204
204 if x and not sx:
205 if x and not sx:
205 # Turn on +x for every +r bit when making a file executable
206 # Turn on +x for every +r bit when making a file executable
206 # and obey umask.
207 # and obey umask.
207 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
208 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
208 elif not x and sx:
209 elif not x and sx:
209 # Turn off all +x bits
210 # Turn off all +x bits
210 os.chmod(f, s & 0o666)
211 os.chmod(f, s & 0o666)
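
A hedged sketch exercising the exec-bit half of setflags() on a throwaway file; it assumes a typical umask such as 022 and that setflags/isexec are importable from mercurial.posix:

import os
import tempfile

from mercurial.posix import isexec, setflags

fd, name = tempfile.mkstemp()
os.close(fd)
f = name.encode()
setflags(f, False, True)    # add +x for every +r bit, honoring umask
assert isexec(f)
setflags(f, False, False)   # strip all exec bits
assert not isexec(f)
os.unlink(name)
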
211
212
212
213
213 def copymode(
214 def copymode(
214 src: bytes,
215 src: bytes,
215 dst: bytes,
216 dst: bytes,
216 mode: Optional[int] = None,
217 mode: Optional[int] = None,
217 enforcewritable: bool = False,
218 enforcewritable: bool = False,
218 ) -> None:
219 ) -> None:
219 """Copy the file mode from the file at path src to dst.
220 """Copy the file mode from the file at path src to dst.
220 If src doesn't exist, we're using mode instead. If mode is None, we're
221 If src doesn't exist, we're using mode instead. If mode is None, we're
221 using umask."""
222 using umask."""
222 try:
223 try:
223 st_mode = os.lstat(src).st_mode & 0o777
224 st_mode = os.lstat(src).st_mode & 0o777
224 except FileNotFoundError:
225 except FileNotFoundError:
225 st_mode = mode
226 st_mode = mode
226 if st_mode is None:
227 if st_mode is None:
227 st_mode = ~umask
228 st_mode = ~umask
228 st_mode &= 0o666
229 st_mode &= 0o666
229
230
230 new_mode = st_mode
231 new_mode = st_mode
231
232
232 if enforcewritable:
233 if enforcewritable:
233 new_mode |= stat.S_IWUSR
234 new_mode |= stat.S_IWUSR
234
235
235 os.chmod(dst, new_mode)
236 os.chmod(dst, new_mode)
236
237
237
238
238 def checkexec(path: bytes) -> bool:
239 def checkexec(path: bytes) -> bool:
239 """
240 """
240 Check whether the given path is on a filesystem with UNIX-like exec flags
241 Check whether the given path is on a filesystem with UNIX-like exec flags
241
242
242 Requires a directory (like /foo/.hg)
243 Requires a directory (like /foo/.hg)
243 """
244 """
244
245
245 # VFAT on some Linux versions can flip mode but it doesn't persist
246 # VFAT on some Linux versions can flip mode but it doesn't persist
246 # across a FS remount. Frequently we can detect it if files are created
247 # across a FS remount. Frequently we can detect it if files are created
247 # with exec bit on.
248 # with exec bit on.
248
249
249 try:
250 try:
250 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
251 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
251 basedir = os.path.join(path, b'.hg')
252 basedir = os.path.join(path, b'.hg')
252 cachedir = os.path.join(basedir, b'wcache')
253 cachedir = os.path.join(basedir, b'wcache')
253 storedir = os.path.join(basedir, b'store')
254 storedir = os.path.join(basedir, b'store')
254 if not os.path.exists(cachedir):
255 if not os.path.exists(cachedir):
255 try:
256 try:
256 # we want to create the 'cache' directory, not the '.hg' one.
257 # we want to create the 'cache' directory, not the '.hg' one.
257 # Automatically creating '.hg' directory could silently spawn
258 # Automatically creating '.hg' directory could silently spawn
258 # invalid Mercurial repositories. That seems like a bad idea.
259 # invalid Mercurial repositories. That seems like a bad idea.
259 os.mkdir(cachedir)
260 os.mkdir(cachedir)
260 if os.path.exists(storedir):
261 if os.path.exists(storedir):
261 copymode(storedir, cachedir)
262 copymode(storedir, cachedir)
262 else:
263 else:
263 copymode(basedir, cachedir)
264 copymode(basedir, cachedir)
264 except (IOError, OSError):
265 except (IOError, OSError):
265 # let the other fallback logic trigger
266 # let the other fallback logic trigger
266 pass
267 pass
267 if os.path.isdir(cachedir):
268 if os.path.isdir(cachedir):
268 checkisexec = os.path.join(cachedir, b'checkisexec')
269 checkisexec = os.path.join(cachedir, b'checkisexec')
269 checknoexec = os.path.join(cachedir, b'checknoexec')
270 checknoexec = os.path.join(cachedir, b'checknoexec')
270
271
271 try:
272 try:
272 m = os.stat(checkisexec).st_mode
273 m = os.stat(checkisexec).st_mode
273 except FileNotFoundError:
274 except FileNotFoundError:
274 # checkisexec does not exist - fall through ...
275 # checkisexec does not exist - fall through ...
275 pass
276 pass
276 else:
277 else:
277 # checkisexec exists, check if it actually is exec
278 # checkisexec exists, check if it actually is exec
278 if m & EXECFLAGS != 0:
279 if m & EXECFLAGS != 0:
279 # ensure checknoexec exists, check it isn't exec
280 # ensure checknoexec exists, check it isn't exec
280 try:
281 try:
281 m = os.stat(checknoexec).st_mode
282 m = os.stat(checknoexec).st_mode
282 except FileNotFoundError:
283 except FileNotFoundError:
283 open(checknoexec, b'w').close() # might fail
284 open(checknoexec, b'w').close() # might fail
284 m = os.stat(checknoexec).st_mode
285 m = os.stat(checknoexec).st_mode
285 if m & EXECFLAGS == 0:
286 if m & EXECFLAGS == 0:
286 # check-exec is exec and check-no-exec is not exec
287 # check-exec is exec and check-no-exec is not exec
287 return True
288 return True
288 # checknoexec exists but is exec - delete it
289 # checknoexec exists but is exec - delete it
289 unlink(checknoexec)
290 unlink(checknoexec)
290 # checkisexec exists but is not exec - delete it
291 # checkisexec exists but is not exec - delete it
291 unlink(checkisexec)
292 unlink(checkisexec)
292
293
293 # check using one file, leave it as checkisexec
294 # check using one file, leave it as checkisexec
294 checkdir = cachedir
295 checkdir = cachedir
295 else:
296 else:
296 # check directly in path and don't leave checkisexec behind
297 # check directly in path and don't leave checkisexec behind
297 checkdir = path
298 checkdir = path
298 checkisexec = None
299 checkisexec = None
299 fh, fn = pycompat.mkstemp(dir=checkdir, prefix=b'hg-checkexec-')
300 fh, fn = pycompat.mkstemp(dir=checkdir, prefix=b'hg-checkexec-')
300 try:
301 try:
301 os.close(fh)
302 os.close(fh)
302 m = os.stat(fn).st_mode
303 m = os.stat(fn).st_mode
303 if m & EXECFLAGS == 0:
304 if m & EXECFLAGS == 0:
304 os.chmod(fn, m & 0o777 | EXECFLAGS)
305 os.chmod(fn, m & 0o777 | EXECFLAGS)
305 if os.stat(fn).st_mode & EXECFLAGS != 0:
306 if os.stat(fn).st_mode & EXECFLAGS != 0:
306 if checkisexec is not None:
307 if checkisexec is not None:
307 os.rename(fn, checkisexec)
308 os.rename(fn, checkisexec)
308 fn = None
309 fn = None
309 return True
310 return True
310 finally:
311 finally:
311 if fn is not None:
312 if fn is not None:
312 unlink(fn)
313 unlink(fn)
313 except (IOError, OSError):
314 except (IOError, OSError):
314 # we don't care, the user probably won't be able to commit anyway
315 # we don't care, the user probably won't be able to commit anyway
315 return False
316 return False
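
checkexec() caches its verdict as two files, checkisexec (expected +x) and checknoexec (expected -x), under .hg/wcache. Below is a small hypothetical debugging helper, not part of Mercurial's API, for inspecting that cache:

import os
import stat

def exec_cache_state(cachedir: bytes) -> None:
    # Report the checkexec cache files left behind in .hg/wcache.
    EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for name in (b'checkisexec', b'checknoexec'):
        p = os.path.join(cachedir, name)
        try:
            m = os.stat(p).st_mode
        except FileNotFoundError:
            print(name.decode(), 'missing')
        else:
            print(name.decode(), '+x' if m & EXECFLAGS else '-x')
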
316
317
317
318
318 def checklink(path: bytes) -> bool:
319 def checklink(path: bytes) -> bool:
319 """check whether the given path is on a symlink-capable filesystem"""
320 """check whether the given path is on a symlink-capable filesystem"""
320 # mktemp is not racy because symlink creation will fail if the
321 # mktemp is not racy because symlink creation will fail if the
321 # file already exists
322 # file already exists
322 while True:
323 while True:
323 cachedir = os.path.join(path, b'.hg', b'wcache')
324 cachedir = os.path.join(path, b'.hg', b'wcache')
324 checklink = os.path.join(cachedir, b'checklink')
325 checklink = os.path.join(cachedir, b'checklink')
325 # try fast path, read only
326 # try fast path, read only
326 if os.path.islink(checklink):
327 if os.path.islink(checklink):
327 return True
328 return True
328 if os.path.isdir(cachedir):
329 if os.path.isdir(cachedir):
329 checkdir = cachedir
330 checkdir = cachedir
330 else:
331 else:
331 checkdir = path
332 checkdir = path
332 cachedir = None
333 cachedir = None
333 name = tempfile.mktemp(
334 name = tempfile.mktemp(
334 dir=pycompat.fsdecode(checkdir), prefix=r'checklink-'
335 dir=pycompat.fsdecode(checkdir), prefix=r'checklink-'
335 )
336 )
336 name = pycompat.fsencode(name)
337 name = pycompat.fsencode(name)
337 try:
338 try:
338 fd = None
339 fd = None
339 if cachedir is None:
340 if cachedir is None:
340 fd = pycompat.namedtempfile(
341 fd = pycompat.namedtempfile(
341 dir=checkdir, prefix=b'hg-checklink-'
342 dir=checkdir, prefix=b'hg-checklink-'
342 )
343 )
343 target = os.path.basename(fd.name)
344 target = os.path.basename(fd.name)
344 else:
345 else:
345 # create a fixed file to link to; doesn't matter if it
346 # create a fixed file to link to; doesn't matter if it
346 # already exists.
347 # already exists.
347 target = b'checklink-target'
348 target = b'checklink-target'
348 try:
349 try:
349 fullpath = os.path.join(cachedir, target)
350 fullpath = os.path.join(cachedir, target)
350 open(fullpath, b'w').close()
351 open(fullpath, b'w').close()
351 except PermissionError:
352 except PermissionError:
352 # If we can't write to cachedir, just pretend
353 # If we can't write to cachedir, just pretend
353 # that the fs is readonly and by association
354 # that the fs is readonly and by association
354 # that the fs won't support symlinks. This
355 # that the fs won't support symlinks. This
355 # seems like the least dangerous way to avoid
356 # seems like the least dangerous way to avoid
356 # data loss.
357 # data loss.
357 return False
358 return False
358 try:
359 try:
359 os.symlink(target, name)
360 os.symlink(target, name)
360 if cachedir is None:
361 if cachedir is None:
361 unlink(name)
362 unlink(name)
362 else:
363 else:
363 try:
364 try:
364 os.rename(name, checklink)
365 os.rename(name, checklink)
365 except OSError:
366 except OSError:
366 unlink(name)
367 unlink(name)
367 return True
368 return True
368 except FileExistsError:
369 except FileExistsError:
369 # link creation might race, try again
370 # link creation might race, try again
370 continue
371 continue
371 finally:
372 finally:
372 if fd is not None:
373 if fd is not None:
373 fd.close()
374 fd.close()
374 except AttributeError:
375 except AttributeError:
375 return False
376 return False
376 except OSError as inst:
377 except OSError as inst:
377 # sshfs might report failure while successfully creating the link
378 # sshfs might report failure while successfully creating the link
378 if inst.errno == errno.EIO and os.path.exists(name):
379 if inst.errno == errno.EIO and os.path.exists(name):
379 unlink(name)
380 unlink(name)
380 return False
381 return False
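
For reference, a minimal reduction of the same probe, with the cache, rename and retry-on-EEXIST machinery elided; the paths and names here are hypothetical:

import os
import tempfile

def supports_symlinks(dirpath: str) -> bool:
    # Create a link to a scratch target and see whether the OS allows it.
    target = os.path.join(dirpath, 'probe-target')
    open(target, 'w').close()
    name = tempfile.mktemp(dir=dirpath, prefix='probe-link-')
    try:
        os.symlink(target, name)
        os.unlink(name)
        return True
    except (AttributeError, NotImplementedError, OSError):
        return False
    finally:
        os.unlink(target)
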
381
382
382
383
383 def checkosfilename(path: bytes) -> Optional[bytes]:
384 def checkosfilename(path: bytes) -> Optional[bytes]:
384 """Check that the base-relative path is a valid filename on this platform.
385 """Check that the base-relative path is a valid filename on this platform.
385 Returns None if the path is ok, or a UI string describing the problem."""
386 Returns None if the path is ok, or a UI string describing the problem."""
386 return None # on posix platforms, every path is ok
387 return None # on posix platforms, every path is ok
387
388
388
389
389 def getfsmountpoint(path: bytes) -> Optional[bytes]:
390 def getfsmountpoint(path: bytes) -> Optional[bytes]:
390 """Get the filesystem mount point from a directory (best-effort)
391 """Get the filesystem mount point from a directory (best-effort)
391
392
392 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
393 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
393 """
394 """
394 return getattr(osutil, 'getfsmountpoint', lambda x: None)(path)
395 return getattr(osutil, 'getfsmountpoint', lambda x: None)(path)
395
396
396
397
397 def getfstype(path: bytes) -> Optional[bytes]:
398 def getfstype(path: bytes) -> Optional[bytes]:
398 """Get the filesystem type name from a directory (best-effort)
399 """Get the filesystem type name from a directory (best-effort)
399
400
400 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
401 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
401 """
402 """
402 return getattr(osutil, 'getfstype', lambda x: None)(path)
403 return getattr(osutil, 'getfstype', lambda x: None)(path)
403
404
404
405
405 def get_password() -> bytes:
406 def get_password() -> bytes:
406 return encoding.strtolocal(getpass.getpass(''))
407 return encoding.strtolocal(getpass.getpass(''))
407
408
408
409
409 def setbinary(fd) -> None:
410 def setbinary(fd) -> None:
410 pass
411 pass
411
412
412
413
413 def pconvert(path: bytes) -> bytes:
414 def pconvert(path: bytes) -> bytes:
414 return path
415 return path
415
416
416
417
417 def localpath(path: bytes) -> bytes:
418 def localpath(path: bytes) -> bytes:
418 return path
419 return path
419
420
420
421
421 def samefile(fpath1: bytes, fpath2: bytes) -> bool:
422 def samefile(fpath1: bytes, fpath2: bytes) -> bool:
422 """Returns whether path1 and path2 refer to the same file. This is only
423 """Returns whether path1 and path2 refer to the same file. This is only
423 guaranteed to work for files, not directories."""
424 guaranteed to work for files, not directories."""
424 return os.path.samefile(fpath1, fpath2)
425 return os.path.samefile(fpath1, fpath2)
425
426
426
427
427 def samedevice(fpath1: bytes, fpath2: bytes) -> bool:
428 def samedevice(fpath1: bytes, fpath2: bytes) -> bool:
428 """Returns whether fpath1 and fpath2 are on the same device. This is only
429 """Returns whether fpath1 and fpath2 are on the same device. This is only
429 guaranteed to work for files, not directories."""
430 guaranteed to work for files, not directories."""
430 st1 = os.lstat(fpath1)
431 st1 = os.lstat(fpath1)
431 st2 = os.lstat(fpath2)
432 st2 = os.lstat(fpath2)
432 return st1.st_dev == st2.st_dev
433 return st1.st_dev == st2.st_dev
433
434
434
435
435 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
436 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
436 def normcase(path: bytes) -> bytes:
437 def normcase(path: bytes) -> bytes:
437 return path.lower()
438 return path.lower()
438
439
439
440
440 # what normcase does to ASCII strings
441 # what normcase does to ASCII strings
441 normcasespec: int = encoding.normcasespecs.lower
442 normcasespec: int = encoding.normcasespecs.lower
442 # fallback normcase function for non-ASCII strings
443 # fallback normcase function for non-ASCII strings
443 normcasefallback = normcase
444 normcasefallback = normcase
444
445
445 if pycompat.isdarwin:
446 if pycompat.isdarwin:
446
447
447 def normcase(path: bytes) -> bytes:
448 def normcase(path: bytes) -> bytes:
448 """
449 """
449 Normalize a filename for OS X-compatible comparison:
450 Normalize a filename for OS X-compatible comparison:
450 - escape-encode invalid characters
451 - escape-encode invalid characters
451 - decompose to NFD
452 - decompose to NFD
452 - lowercase
453 - lowercase
453 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
454 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
454
455
455 >>> normcase(b'UPPER')
456 >>> normcase(b'UPPER')
456 'upper'
457 'upper'
457 >>> normcase(b'Caf\\xc3\\xa9')
458 >>> normcase(b'Caf\\xc3\\xa9')
458 'cafe\\xcc\\x81'
459 'cafe\\xcc\\x81'
459 >>> normcase(b'\\xc3\\x89')
460 >>> normcase(b'\\xc3\\x89')
460 'e\\xcc\\x81'
461 'e\\xcc\\x81'
461 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
462 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
462 '%b8%ca%c3\\xca\\xbe%c8.jpg'
463 '%b8%ca%c3\\xca\\xbe%c8.jpg'
463 """
464 """
464
465
465 try:
466 try:
466 return encoding.asciilower(path) # exception for non-ASCII
467 return encoding.asciilower(path) # exception for non-ASCII
467 except UnicodeDecodeError:
468 except UnicodeDecodeError:
468 return normcasefallback(path)
469 return normcasefallback(path)
469
470
470 normcasespec = encoding.normcasespecs.lower
471 normcasespec = encoding.normcasespecs.lower
471
472
472 def normcasefallback(path: bytes) -> bytes:
473 def normcasefallback(path: bytes) -> bytes:
473 try:
474 try:
474 u = path.decode('utf-8')
475 u = path.decode('utf-8')
475 except UnicodeDecodeError:
476 except UnicodeDecodeError:
476 # OS X percent-encodes any bytes that aren't valid utf-8
477 # OS X percent-encodes any bytes that aren't valid utf-8
477 s = b''
478 s = b''
478 pos = 0
479 pos = 0
479 l = len(path)
480 l = len(path)
480 while pos < l:
481 while pos < l:
481 try:
482 try:
482 c = encoding.getutf8char(path, pos)
483 c = encoding.getutf8char(path, pos)
483 pos += len(c)
484 pos += len(c)
484 except ValueError:
485 except ValueError:
485 c = b'%%%02X' % ord(path[pos : pos + 1])
486 c = b'%%%02X' % ord(path[pos : pos + 1])
486 pos += 1
487 pos += 1
487 s += c
488 s += c
488
489
489 u = s.decode('utf-8')
490 u = s.decode('utf-8')
490
491
491 # Decompose then lowercase (HFS+ technote specifies lower)
492 # Decompose then lowercase (HFS+ technote specifies lower)
492 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
493 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
493 # drop HFS+ ignored characters
494 # drop HFS+ ignored characters
494 return encoding.hfsignoreclean(enc)
495 return encoding.hfsignoreclean(enc)
495
496
496
497
497 if pycompat.sysplatform == b'cygwin':
498 if pycompat.sysplatform == b'cygwin':
498 # workaround for cygwin, in which mount point part of path is
499 # workaround for cygwin, in which mount point part of path is
499 # treated as case sensitive, even though underlying NTFS is case
500 # treated as case sensitive, even though underlying NTFS is case
500 # insensitive.
501 # insensitive.
501
502
502 # default mount points
503 # default mount points
503 cygwinmountpoints = sorted(
504 cygwinmountpoints = sorted(
504 [
505 [
505 b"/usr/bin",
506 b"/usr/bin",
506 b"/usr/lib",
507 b"/usr/lib",
507 b"/cygdrive",
508 b"/cygdrive",
508 ],
509 ],
509 reverse=True,
510 reverse=True,
510 )
511 )
511
512
512 # use uppercasing for normcase, the same as the NTFS workaround
513 # use uppercasing for normcase, the same as the NTFS workaround
513 def normcase(path: bytes) -> bytes:
514 def normcase(path: bytes) -> bytes:
514 pathlen = len(path)
515 pathlen = len(path)
515 if (pathlen == 0) or (path[0:1] != pycompat.ossep):
516 if (pathlen == 0) or (path[0:1] != pycompat.ossep):
516 # treat as relative
517 # treat as relative
517 return encoding.upper(path)
518 return encoding.upper(path)
518
519
519 # to preserve case of mountpoint part
520 # to preserve case of mountpoint part
520 for mp in cygwinmountpoints:
521 for mp in cygwinmountpoints:
521 if not path.startswith(mp):
522 if not path.startswith(mp):
522 continue
523 continue
523
524
524 mplen = len(mp)
525 mplen = len(mp)
525 if mplen == pathlen: # mount point itself
526 if mplen == pathlen: # mount point itself
526 return mp
527 return mp
527 if path[mplen : mplen + 1] == pycompat.ossep:
528 if path[mplen : mplen + 1] == pycompat.ossep:
528 return mp + encoding.upper(path[mplen:])
529 return mp + encoding.upper(path[mplen:])
529
530
530 return encoding.upper(path)
531 return encoding.upper(path)
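
Illustrative expectations for the Cygwin normcase under the default mount point table above (a sketch, not doctest output):

# normcase(b'relative/Path')  == b'RELATIVE/PATH'   no leading separator
# normcase(b'/usr/bin')       == b'/usr/bin'        a mount point itself
# normcase(b'/usr/bin/Grep')  == b'/usr/bin/GREP'   mount point case kept
# normcase(b'/Home/User')     == b'/HOME/USER'      not under any mount point
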
531
532
532 normcasespec = encoding.normcasespecs.other
533 normcasespec = encoding.normcasespecs.other
533 normcasefallback = normcase
534 normcasefallback = normcase
534
535
535 # Cygwin translates native ACLs to POSIX permissions,
536 # Cygwin translates native ACLs to POSIX permissions,
536 # but these translations are not supported by native
537 # but these translations are not supported by native
537 # tools, so the exec bit tends to be set erroneously.
538 # tools, so the exec bit tends to be set erroneously.
538 # Therefore, disable executable bit access on Cygwin.
539 # Therefore, disable executable bit access on Cygwin.
539 def checkexec(path: bytes) -> bool:
540 def checkexec(path: bytes) -> bool:
540 return False
541 return False
541
542
542 # Similarly, Cygwin's symlink emulation is likely to create
543 # Similarly, Cygwin's symlink emulation is likely to create
543 # problems when Mercurial is used from both Cygwin and native
544 # problems when Mercurial is used from both Cygwin and native
544 # Windows, with other native tools, or on shared volumes
545 # Windows, with other native tools, or on shared volumes
545 def checklink(path: bytes) -> bool:
546 def checklink(path: bytes) -> bool:
546 return False
547 return False
547
548
548
549
549 if pycompat.sysplatform == b'OpenVMS':
550 if pycompat.sysplatform == b'OpenVMS':
550 # OpenVMS's symlink emulation is broken on some OpenVMS versions.
551 # OpenVMS's symlink emulation is broken on some OpenVMS versions.
551 def checklink(path: bytes) -> bool:
552 def checklink(path: bytes) -> bool:
552 return False
553 return False
553
554
554
555
555 _needsshellquote: Optional[Match[bytes]] = None
556 _needsshellquote: Optional[Callable[[bytes], Optional[Match[bytes]]]] = None
556
557
557
558
558 def shellquote(s: bytes) -> bytes:
559 def shellquote(s: bytes) -> bytes:
559 if pycompat.sysplatform == b'OpenVMS':
560 if pycompat.sysplatform == b'OpenVMS':
560 return b'"%s"' % s
561 return b'"%s"' % s
561 global _needsshellquote
562 global _needsshellquote
562 if _needsshellquote is None:
563 if _needsshellquote is None:
563 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
564 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
564 if s and not _needsshellquote(s):
565 if s and not _needsshellquote(s):
565 # "s" shouldn't have to be quoted
566 # "s" shouldn't have to be quoted
566 return s
567 return s
567 else:
568 else:
568 return b"'%s'" % s.replace(b"'", b"'\\''")
569 return b"'%s'" % s.replace(b"'", b"'\\''")
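
Hedged examples of the POSIX quoting branch, assuming mercurial.posix is importable:

from mercurial.posix import shellquote

print(shellquote(b'safe/path-1.0'))   # b'safe/path-1.0' (nothing to quote)
print(shellquote(b'two words'))       # b"'two words'"
print(shellquote(b"it's"))            # b"'it'\\''s'", the shell text 'it'\''s'
print(shellquote(b''))                # b"''" (empty must remain an argument)
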
569
570
570
571
571 def shellsplit(s: bytes) -> List[bytes]:
572 def shellsplit(s: bytes) -> List[bytes]:
572 """Parse a command string in POSIX shell way (best-effort)"""
573 """Parse a command string in POSIX shell way (best-effort)"""
573 return pycompat.shlexsplit(s, posix=True)
574 return pycompat.shlexsplit(s, posix=True)
574
575
575
576
576 def testpid(pid: int) -> bool:
577 def testpid(pid: int) -> bool:
577 '''return False if pid dead, True if running or not sure'''
578 '''return False if pid dead, True if running or not sure'''
578 if pycompat.sysplatform == b'OpenVMS':
579 if pycompat.sysplatform == b'OpenVMS':
579 return True
580 return True
580 try:
581 try:
581 os.kill(pid, 0)
582 os.kill(pid, 0)
582 return True
583 return True
583 except OSError as inst:
584 except OSError as inst:
584 return inst.errno != errno.ESRCH
585 return inst.errno != errno.ESRCH
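
A short usage note: kill(pid, 0) probes without signaling, and an EPERM failure still proves the process exists, which is why only ESRCH maps to "dead":

import os

from mercurial.posix import testpid

assert testpid(os.getpid())   # our own process is certainly running
# A pid owned by another user raises EPERM from kill(); testpid
# deliberately reports True for that case too.
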
585
586
586
587
587 def isowner(st: os.stat_result) -> bool:
588 def isowner(st: os.stat_result) -> bool:
588 """Return True if the stat object st is from the current user."""
589 """Return True if the stat object st is from the current user."""
589 return st.st_uid == os.getuid()
590 return st.st_uid == os.getuid()
590
591
591
592
592 def findexe(command: bytes) -> Optional[bytes]:
593 def findexe(command: bytes) -> Optional[bytes]:
593 """Find executable for command searching like which does.
594 """Find executable for command searching like which does.
594 If command is a basename then PATH is searched for command.
595 If command is a basename then PATH is searched for command.
595 PATH isn't searched if command is an absolute or relative path.
596 PATH isn't searched if command is an absolute or relative path.
596 If command isn't found None is returned."""
597 If command isn't found None is returned."""
597 if pycompat.sysplatform == b'OpenVMS':
598 if pycompat.sysplatform == b'OpenVMS':
598 return command
599 return command
599
600
600 def findexisting(executable: bytes) -> Optional[bytes]:
601 def findexisting(executable: bytes) -> Optional[bytes]:
601 b'Will return executable if existing file'
602 b'Will return executable if existing file'
602 if os.path.isfile(executable) and os.access(executable, os.X_OK):
603 if os.path.isfile(executable) and os.access(executable, os.X_OK):
603 return executable
604 return executable
604 return None
605 return None
605
606
606 if pycompat.ossep in command:
607 if pycompat.ossep in command:
607 return findexisting(command)
608 return findexisting(command)
608
609
609 if pycompat.sysplatform == b'plan9':
610 if pycompat.sysplatform == b'plan9':
610 return findexisting(os.path.join(b'/bin', command))
611 return findexisting(os.path.join(b'/bin', command))
611
612
612 for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
613 for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
613 executable = findexisting(os.path.join(path, command))
614 executable = findexisting(os.path.join(path, command))
614 if executable is not None:
615 if executable is not None:
615 return executable
616 return executable
616 return None
617 return None
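
Hypothetical lookups (results depend on the local PATH):

from mercurial.posix import findexe

findexe(b'sh')             # e.g. b'/bin/sh'
findexe(b'./local-tool')   # contains ossep: checked directly, PATH ignored
findexe(b'no-such-cmd')    # None when nothing executable matches
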
617
618
618
619
619 def setsignalhandler() -> None:
620 def setsignalhandler() -> None:
620 pass
621 pass
621
622
622
623
623 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
624 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
624
625
625
626
626 def statfiles(files: Sequence[bytes]) -> Iterator[Optional[os.stat_result]]:
627 def statfiles(files: Sequence[bytes]) -> Iterator[Optional[os.stat_result]]:
627 """Stat each file in files. Yield each stat, or None if a file does not
628 """Stat each file in files. Yield each stat, or None if a file does not
628 exist or has a type we don't care about."""
629 exist or has a type we don't care about."""
629 lstat = os.lstat
630 lstat = os.lstat
630 getkind = stat.S_IFMT
631 getkind = stat.S_IFMT
631 for nf in files:
632 for nf in files:
632 try:
633 try:
633 st = lstat(nf)
634 st = lstat(nf)
634 if getkind(st.st_mode) not in _wantedkinds:
635 if getkind(st.st_mode) not in _wantedkinds:
635 st = None
636 st = None
636 except (FileNotFoundError, NotADirectoryError):
637 except (FileNotFoundError, NotADirectoryError):
637 st = None
638 st = None
638 yield st
639 yield st
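
A hedged sketch with illustrative paths; directories are filtered out just like missing files, because S_IFDIR is not in _wantedkinds:

from mercurial.posix import statfiles

for st in statfiles([b'/etc/hosts', b'/etc', b'/no/such/file']):
    print(None if st is None else st.st_size)
# prints a size for the regular file, then None twice
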
639
640
640
641
641 def getuser() -> bytes:
642 def getuser() -> bytes:
642 '''return name of current user'''
643 '''return name of current user'''
643 return pycompat.fsencode(getpass.getuser())
644 return pycompat.fsencode(getpass.getuser())
644
645
645
646
646 def username(uid: Optional[int] = None) -> Optional[bytes]:
647 def username(uid: Optional[int] = None) -> Optional[bytes]:
647 """Return the name of the user with the given uid.
648 """Return the name of the user with the given uid.
648
649
649 If uid is None, return the name of the current user."""
650 If uid is None, return the name of the current user."""
650
651
651 if uid is None:
652 if uid is None:
652 uid = os.getuid()
653 uid = os.getuid()
653 try:
654 try:
654 return pycompat.fsencode(pwd.getpwuid(uid)[0])
655 return pycompat.fsencode(pwd.getpwuid(uid)[0])
655 except KeyError:
656 except KeyError:
656 return b'%d' % uid
657 return b'%d' % uid
657
658
658
659
659 def groupname(gid: Optional[int] = None) -> Optional[bytes]:
660 def groupname(gid: Optional[int] = None) -> Optional[bytes]:
660 """Return the name of the group with the given gid.
661 """Return the name of the group with the given gid.
661
662
662 If gid is None, return the name of the current group."""
663 If gid is None, return the name of the current group."""
663
664
664 if gid is None:
665 if gid is None:
665 gid = os.getgid()
666 gid = os.getgid()
666 try:
667 try:
667 return pycompat.fsencode(grp.getgrgid(gid)[0])
668 return pycompat.fsencode(grp.getgrgid(gid)[0])
668 except KeyError:
669 except KeyError:
669 return pycompat.bytestr(gid)
670 return pycompat.bytestr(gid)
670
671
671
672
672 def groupmembers(name: bytes) -> List[bytes]:
673 def groupmembers(name: bytes) -> List[bytes]:
673 """Return the list of members of the group with the given
674 """Return the list of members of the group with the given
674 name, KeyError if the group does not exist.
675 name, KeyError if the group does not exist.
675 """
676 """
676 name = pycompat.fsdecode(name)
677 name = pycompat.fsdecode(name)
677 return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
678 return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
678
679
679
680
680 def spawndetached(args: List[bytes]) -> int:
681 def spawndetached(args: List[bytes]) -> int:
681 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
682 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
682
683
683
684
684 def gethgcmd(): # TODO: convert to bytes, like on Windows?
685 def gethgcmd(): # TODO: convert to bytes, like on Windows?
685 return sys.argv[:1]
686 return sys.argv[:1]
686
687
687
688
688 def makedir(path: bytes, notindexed: bool) -> None:
689 def makedir(path: bytes, notindexed: bool) -> None:
689 os.mkdir(path)
690 os.mkdir(path)
690
691
691
692
692 def lookupreg(
693 def lookupreg(
693 key: bytes,
694 key: bytes,
694 valname: Optional[bytes] = None,
695 valname: Optional[bytes] = None,
695 scope: Optional[Union[int, Iterable[int]]] = None,
696 scope: Optional[Union[int, Iterable[int]]] = None,
696 ) -> Optional[bytes]:
697 ) -> Optional[bytes]:
697 return None
698 return None
698
699
699
700
700 def hidewindow() -> None:
701 def hidewindow() -> None:
701 """Hide current shell window.
702 """Hide current shell window.
702
703
703 Used to hide the window opened when starting asynchronous
704 Used to hide the window opened when starting asynchronous
704 child process under Windows, unneeded on other systems.
705 child process under Windows, unneeded on other systems.
705 """
706 """
706 pass
707 pass
707
708
708
709
709 class cachestat:
710 class cachestat:
710 stat: os.stat_result
711 stat: os.stat_result
711
712
712 def __init__(self, path: bytes) -> None:
713 def __init__(self, path: bytes) -> None:
713 self.stat = os.stat(path)
714 self.stat = os.stat(path)
714
715
715 def cacheable(self) -> bool:
716 def cacheable(self) -> bool:
716 return bool(self.stat.st_ino)
717 return bool(self.stat.st_ino)
717
718
718 __hash__ = object.__hash__
719 __hash__ = object.__hash__
719
720
720 def __eq__(self, other: Any) -> bool:
721 def __eq__(self, other: Any) -> bool:
721 try:
722 try:
722 # Only dev, ino, size, mtime and atime are likely to change. Out
723 # Only dev, ino, size, mtime and atime are likely to change. Out
723 # of these, we shouldn't compare atime but should compare the
724 # of these, we shouldn't compare atime but should compare the
724 # rest. However, one of the other fields changing indicates
725 # rest. However, one of the other fields changing indicates
725 # something fishy going on, so return False if anything but atime
726 # something fishy going on, so return False if anything but atime
726 # changes.
727 # changes.
727 return (
728 return (
728 self.stat.st_mode == other.stat.st_mode
729 self.stat.st_mode == other.stat.st_mode
729 and self.stat.st_ino == other.stat.st_ino
730 and self.stat.st_ino == other.stat.st_ino
730 and self.stat.st_dev == other.stat.st_dev
731 and self.stat.st_dev == other.stat.st_dev
731 and self.stat.st_nlink == other.stat.st_nlink
732 and self.stat.st_nlink == other.stat.st_nlink
732 and self.stat.st_uid == other.stat.st_uid
733 and self.stat.st_uid == other.stat.st_uid
733 and self.stat.st_gid == other.stat.st_gid
734 and self.stat.st_gid == other.stat.st_gid
734 and self.stat.st_size == other.stat.st_size
735 and self.stat.st_size == other.stat.st_size
735 and self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME]
736 and self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME]
736 and self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME]
737 and self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME]
737 )
738 )
738 except AttributeError:
739 except AttributeError:
739 return False
740 return False
740
741
741 def __ne__(self, other: Any) -> bool:
742 def __ne__(self, other: Any) -> bool:
742 return not self == other
743 return not self == other
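
Typical use is change detection between two snapshots; the path below is hypothetical:

from mercurial.posix import cachestat

before = cachestat(b'/tmp/example')
# ... the file may be rewritten here ...
after = cachestat(b'/tmp/example')
if before != after:
    print('metadata changed; invalidate anything cached from this file')
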
743
744
744
745
745 def statislink(st: Optional[os.stat_result]) -> bool:
746 def statislink(st: Optional[os.stat_result]) -> bool:
746 '''check whether a stat result is a symlink'''
747 '''check whether a stat result is a symlink'''
747 return stat.S_ISLNK(st.st_mode) if st else False
748 return stat.S_ISLNK(st.st_mode) if st else False
748
749
749
750
750 def statisexec(st: Optional[os.stat_result]) -> bool:
751 def statisexec(st: Optional[os.stat_result]) -> bool:
751 '''check whether a stat result is an executable file'''
752 '''check whether a stat result is an executable file'''
752 return (st.st_mode & 0o100 != 0) if st else False
753 return (st.st_mode & 0o100 != 0) if st else False
753
754
754
755
755 def poll(fds):
756 def poll(fds):
756 """block until something happens on any file descriptor
757 """block until something happens on any file descriptor
757
758
758 This is a generic helper that will check for any activity
759 This is a generic helper that will check for any activity
759 (read, write, exception) and return the list of touched files.
760 (read, write, exception) and return the list of touched files.
760
761
761 In unsupported cases, it will raise a NotImplementedError"""
762 In unsupported cases, it will raise a NotImplementedError"""
762 try:
763 try:
763 res = select.select(fds, fds, fds)
764 res = select.select(fds, fds, fds)
764 except ValueError: # out of range file descriptor
765 except ValueError: # out of range file descriptor
765 raise NotImplementedError()
766 raise NotImplementedError()
766 return sorted(list(set(sum(res, []))))
767 return sorted(list(set(sum(res, []))))
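
A self-contained sketch with a pipe: the read end becomes readable once data is written, and the write end is always writable, so both descriptors come back:

import os

from mercurial.posix import poll

r, w = os.pipe()
os.write(w, b'x')
print(poll([r, w]))   # [r, w] (sorted, deduplicated)
os.close(r)
os.close(w)
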
767
768
768
769
769 def readpipe(pipe) -> bytes:
770 def readpipe(pipe) -> bytes:
770 """Read all available data from a pipe."""
771 """Read all available data from a pipe."""
771 # We can't fstat() a pipe because Linux will always report 0.
772 # We can't fstat() a pipe because Linux will always report 0.
772 # So, we set the pipe to non-blocking mode and read everything
773 # So, we set the pipe to non-blocking mode and read everything
773 # that's available.
774 # that's available.
774 oldflags = fcntl.fcntl(pipe, fcntl.F_GETFL)
775 oldflags = fcntl.fcntl(pipe, fcntl.F_GETFL)
775 # fcntl F_SETFL returns 0, so capture the old flags before changing them
776 # fcntl F_SETFL returns 0, so capture the old flags before changing them
776 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
777 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
777
778
778 try:
779 try:
779 chunks = []
780 chunks = []
780 while True:
781 while True:
781 try:
782 try:
782 s = pipe.read()
783 s = pipe.read()
783 if not s:
784 if not s:
784 break
785 break
785 chunks.append(s)
786 chunks.append(s)
786 except IOError:
787 except IOError:
787 break
788 break
788
789
789 return b''.join(chunks)
790 return b''.join(chunks)
790 finally:
791 finally:
791 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
792 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
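
A hedged end-to-end sketch with an OS pipe; readpipe() drains whatever is buffered without blocking on the still-open write end:

import os

from mercurial.posix import readpipe

r, w = os.pipe()
os.write(w, b'hello\n')
with os.fdopen(r, 'rb') as pipe:
    print(readpipe(pipe))   # b'hello\n'
os.close(w)
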
792
793
793
794
794 def bindunixsocket(sock, path: bytes) -> None:
795 def bindunixsocket(sock, path: bytes) -> None:
795 """Bind the UNIX domain socket to the specified path"""
796 """Bind the UNIX domain socket to the specified path"""
796 # use relative path instead of full path at bind() if possible, since
797 # use relative path instead of full path at bind() if possible, since
797 # AF_UNIX path has very small length limit (107 chars) on common
798 # AF_UNIX path has very small length limit (107 chars) on common
798 # platforms (see sys/un.h)
799 # platforms (see sys/un.h)
799 dirname, basename = os.path.split(path)
800 dirname, basename = os.path.split(path)
800 bakwdfd = None
801 bakwdfd = None
801
802
802 try:
803 try:
803 if dirname:
804 if dirname:
804 bakwdfd = os.open(b'.', os.O_DIRECTORY)
805 bakwdfd = os.open(b'.', os.O_DIRECTORY)
805 os.chdir(dirname)
806 os.chdir(dirname)
806 sock.bind(basename)
807 sock.bind(basename)
807 if bakwdfd:
808 if bakwdfd:
808 os.fchdir(bakwdfd)
809 os.fchdir(bakwdfd)
809 finally:
810 finally:
810 if bakwdfd:
811 if bakwdfd:
811 os.close(bakwdfd)
812 os.close(bakwdfd)
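
Usage sketch with a hypothetical path; only the basename is passed to bind(), so a deeply nested socket path stays under AF_UNIX's ~107-byte limit:

import socket

from mercurial.posix import bindunixsocket

sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
bindunixsocket(sock, b'/tmp/a-rather-long-directory-name/hg.sock')
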
@@ -1,1103 +1,1103
1 ## statprof.py
1 ## statprof.py
2 ## Copyright (C) 2012 Bryan O'Sullivan <bos@serpentine.com>
2 ## Copyright (C) 2012 Bryan O'Sullivan <bos@serpentine.com>
3 ## Copyright (C) 2011 Alex Fraser <alex at phatcore dot com>
3 ## Copyright (C) 2011 Alex Fraser <alex at phatcore dot com>
4 ## Copyright (C) 2004,2005 Andy Wingo <wingo at pobox dot com>
4 ## Copyright (C) 2004,2005 Andy Wingo <wingo at pobox dot com>
5 ## Copyright (C) 2001 Rob Browning <rlb at defaultvalue dot org>
5 ## Copyright (C) 2001 Rob Browning <rlb at defaultvalue dot org>
6
6
7 ## This library is free software; you can redistribute it and/or
7 ## This library is free software; you can redistribute it and/or
8 ## modify it under the terms of the GNU Lesser General Public
8 ## modify it under the terms of the GNU Lesser General Public
9 ## License as published by the Free Software Foundation; either
9 ## License as published by the Free Software Foundation; either
10 ## version 2.1 of the License, or (at your option) any later version.
10 ## version 2.1 of the License, or (at your option) any later version.
11 ##
11 ##
12 ## This library is distributed in the hope that it will be useful,
12 ## This library is distributed in the hope that it will be useful,
13 ## but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ## but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 ## Lesser General Public License for more details.
15 ## Lesser General Public License for more details.
16 ##
16 ##
17 ## You should have received a copy of the GNU Lesser General Public
17 ## You should have received a copy of the GNU Lesser General Public
18 ## License along with this program; if not, contact:
18 ## License along with this program; if not, contact:
19 ##
19 ##
20 ## Free Software Foundation Voice: +1-617-542-5942
20 ## Free Software Foundation Voice: +1-617-542-5942
21 ## 59 Temple Place - Suite 330 Fax: +1-617-542-2652
21 ## 59 Temple Place - Suite 330 Fax: +1-617-542-2652
22 ## Boston, MA 02111-1307, USA gnu@gnu.org
22 ## Boston, MA 02111-1307, USA gnu@gnu.org
23
23
24 """
24 """
25 statprof is intended to be a fairly simple statistical profiler for
25 statprof is intended to be a fairly simple statistical profiler for
26 python. It was ported directly from a statistical profiler for guile,
26 python. It was ported directly from a statistical profiler for guile,
27 also named statprof, available from guile-lib [0].
27 also named statprof, available from guile-lib [0].
28
28
29 [0] http://wingolog.org/software/guile-lib/statprof/
29 [0] http://wingolog.org/software/guile-lib/statprof/
30
30
31 To start profiling, call statprof.start():
31 To start profiling, call statprof.start():
32 >>> start()
32 >>> start()
33
33
34 Then run whatever it is that you want to profile, for example:
34 Then run whatever it is that you want to profile, for example:
35 >>> import test.pystone; test.pystone.pystones()
35 >>> import test.pystone; test.pystone.pystones()
36
36
37 Then stop the profiling and print out the results:
37 Then stop the profiling and print out the results:
38 >>> stop()
38 >>> stop()
39 >>> display()
39 >>> display()
40 % cumulative self
40 % cumulative self
41 time seconds seconds name
41 time seconds seconds name
42 26.72 1.40 0.37 pystone.py:79:Proc0
42 26.72 1.40 0.37 pystone.py:79:Proc0
43 13.79 0.56 0.19 pystone.py:133:Proc1
43 13.79 0.56 0.19 pystone.py:133:Proc1
44 13.79 0.19 0.19 pystone.py:208:Proc8
44 13.79 0.19 0.19 pystone.py:208:Proc8
45 10.34 0.16 0.14 pystone.py:229:Func2
45 10.34 0.16 0.14 pystone.py:229:Func2
46 6.90 0.10 0.10 pystone.py:45:__init__
46 6.90 0.10 0.10 pystone.py:45:__init__
47 4.31 0.16 0.06 pystone.py:53:copy
47 4.31 0.16 0.06 pystone.py:53:copy
48 ...
48 ...
49
49
50 All of the numerical data is statistically approximate. In the
50 All of the numerical data is statistically approximate. In the
51 following column descriptions, and in all of statprof, "time" refers
51 following column descriptions, and in all of statprof, "time" refers
52 to execution time (both user and system), not wall clock time.
52 to execution time (both user and system), not wall clock time.
53
53
54 % time
54 % time
55 The percent of the time spent inside the procedure itself (not
55 The percent of the time spent inside the procedure itself (not
56 counting children).
56 counting children).
57
57
58 cumulative seconds
58 cumulative seconds
59 The total number of seconds spent in the procedure, including
59 The total number of seconds spent in the procedure, including
60 children.
60 children.
61
61
62 self seconds
62 self seconds
63 The total number of seconds spent in the procedure itself (not
63 The total number of seconds spent in the procedure itself (not
64 counting children).
64 counting children).
65
65
66 name
66 name
67 The name of the procedure.
67 The name of the procedure.
68
68
69 By default statprof keeps the data collected from previous runs. If you
69 By default statprof keeps the data collected from previous runs. If you
70 want to clear the collected data, call reset():
70 want to clear the collected data, call reset():
71 >>> reset()
71 >>> reset()
72
72
73 reset() can also be used to change the sampling frequency from the
73 reset() can also be used to change the sampling frequency from the
74 default of 1000 Hz. For example, to tell statprof to sample 50 times a
74 default of 1000 Hz. For example, to tell statprof to sample 50 times a
75 second:
75 second:
76 >>> reset(50)
76 >>> reset(50)
77
77
78 This means that statprof will sample the call stack after every 1/50 of
78 This means that statprof will sample the call stack after every 1/50 of
79 a second of user + system time spent running on behalf of the python
79 a second of user + system time spent running on behalf of the python
80 process. When your process is idle (for example, blocking in a read(),
80 process. When your process is idle (for example, blocking in a read(),
81 as is the case at the listener), the clock does not advance. For this
81 as is the case at the listener), the clock does not advance. For this
82 reason statprof is not currently not suitable for profiling io-bound
82 reason statprof is currently not suitable for profiling io-bound
82 reason statprof is currently not suitable for profiling io-bound
83 operations.
84
84
85 The profiler uses the hash of the code object itself to identify the
85 The profiler uses the hash of the code object itself to identify the
86 procedures, so it won't confuse different procedures with the same name.
86 procedures, so it won't confuse different procedures with the same name.
87 They will show up as two different rows in the output.
87 They will show up as two different rows in the output.
88
88
89 Right now the profiler is quite simplistic. It cannot provide
89 Right now the profiler is quite simplistic. It cannot provide
90 call-graphs or other higher level information. What you see in the
90 call-graphs or other higher level information. What you see in the
91 table is pretty much all there is. Patches are welcome :-)
91 table is pretty much all there is. Patches are welcome :-)
92
92
93
93
94 Threading
94 Threading
95 ---------
95 ---------
96
96
97 Because signals only get delivered to the main thread in Python,
97 Because signals only get delivered to the main thread in Python,
98 statprof only profiles the main thread. However because the time
98 statprof only profiles the main thread. However because the time
99 reporting function uses per-process timers, the results can be
99 reporting function uses per-process timers, the results can be
100 significantly off if other threads' work patterns are not similar to the
100 significantly off if other threads' work patterns are not similar to the
101 main thread's work patterns.
101 main thread's work patterns.
102 """
102 """
103 # no-check-code
103 # no-check-code
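
Condensing the workflow described in the docstring into one runnable sketch; the exact keyword arguments accepted by start() and display() may vary, so defaults are assumed here:

from mercurial import statprof

statprof.start()
total = sum(i * i for i in range(10 ** 6))   # the workload being profiled
statprof.stop()
statprof.display()   # prints a table like the one shown above
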
104
104
105 import collections
105 import collections
106 import contextlib
106 import contextlib
107 import getopt
107 import getopt
108 import inspect
108 import inspect
109 import json
109 import json
110 import os
110 import os
111 import signal
111 import signal
112 import sys
112 import sys
113 import threading
113 import threading
114 import time
114 import time
115
115
116 from typing import (
116 from typing import (
117 List,
117 List,
118 )
118 )
119
119
120 from .pycompat import open
120 from .pycompat import open
121 from . import (
121 from . import (
122 encoding,
122 encoding,
123 pycompat,
123 pycompat,
124 )
124 )
125
125
126 defaultdict = collections.defaultdict
126 defaultdict = collections.defaultdict
127 contextmanager = contextlib.contextmanager
127 contextmanager = contextlib.contextmanager
128
128
129 __all__ = [b'start', b'stop', b'reset', b'display', b'profile']
129 __all__ = ['start', 'stop', 'reset', 'display', 'profile']

skips = {
    "util.py:check",
    "extensions.py:closure",
    "color.py:colorcmd",
    "dispatch.py:checkargs",
    "dispatch.py:<lambda>",
    "dispatch.py:_runcatch",
    "dispatch.py:_dispatch",
    "dispatch.py:_runcommand",
    "pager.py:pagecmd",
    "dispatch.py:run",
    "dispatch.py:dispatch",
    "dispatch.py:runcommand",
    "hg.py:<module>",
    "evolve.py:warnobserrors",
}

###########################################################################
## Utils


def clock():
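    # os.times() returns (user, system, children_user, children_system,
    # elapsed); user + system is this process's CPU time, while index 4
    # is wall-clock time.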
    times = os.times()
    return (times[0] + times[1], times[4])


###########################################################################
## Collection data structures


class ProfileState:
    samples: List["Sample"]

    def __init__(self, frequency=None):
        self.reset(frequency)
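        # track selects which accumulated_time index reports use:
        # b'cpu' (index 0) or b'real' (index 1); see timeidx below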
        self.track = b'cpu'

    def reset(self, frequency=None):
        # total so far
        self.accumulated_time = (0.0, 0.0)
        # start_time when timer is active
        self.last_start_time = None
        # a float
        if frequency:
            self.sample_interval = 1.0 / frequency
        elif not hasattr(self, 'sample_interval'):
            # default to 1000 Hz
            self.sample_interval = 1.0 / 1000.0
        else:
            # leave the frequency as it was
            pass
        self.remaining_prof_time = None
        # for user start/stop nesting
        self.profile_level = 0

        self.samples = []

    def accumulate_time(self, stop_time):
        increment = (
            stop_time[0] - self.last_start_time[0],
            stop_time[1] - self.last_start_time[1],
        )
        self.accumulated_time = (
            self.accumulated_time[0] + increment[0],
            self.accumulated_time[1] + increment[1],
        )

    def seconds_per_sample(self):
        return self.accumulated_time[self.timeidx] / len(self.samples)

    @property
    def timeidx(self):
        if self.track == b'real':
            return 1
        return 0


state = ProfileState()


class CodeSite:
    cache = {}

    __slots__ = ('path', 'lineno', 'function', 'source')

    def __init__(self, path, lineno, function):
        assert isinstance(path, bytes)
        self.path = path
        self.lineno = lineno
        assert isinstance(function, bytes)
        self.function = function
        self.source = None

    def __eq__(self, other):
        try:
            return self.lineno == other.lineno and self.path == other.path
        except:
            return False

    def __hash__(self):
        return hash((self.lineno, self.path))

    @classmethod
    def get(cls, path, lineno, function):
        k = (path, lineno)
        try:
            return cls.cache[k]
        except KeyError:
            v = cls(path, lineno, function)
            cls.cache[k] = v
            return v

    def getsource(self, length):
        if self.source is None:
            try:
                lineno = self.lineno - 1  # lineno can be None
                with open(self.path, b'rb') as fp:
                    for i, line in enumerate(fp):
                        if i == lineno:
                            self.source = line.strip()
                            break
            except:
                pass
            if self.source is None:
                self.source = b''

        source = self.source
        if len(source) > length:
            source = source[: (length - 3)] + b"..."
        return source

    def filename(self):
        return os.path.basename(self.path)

    def skipname(self):
        return '%s:%s' % (self.filename(), self.function)


class Sample:
    __slots__ = ('stack', 'time')

    def __init__(self, stack, time):
        self.stack = stack
        self.time = time

    @classmethod
    def from_frame(cls, frame, time):
        stack = []

        while frame:
            stack.append(
                CodeSite.get(
                    pycompat.sysbytes(frame.f_code.co_filename),
                    frame.f_lineno,
                    pycompat.sysbytes(frame.f_code.co_name),
                )
            )
            frame = frame.f_back

        return Sample(stack, time)


###########################################################################
## SIGPROF handler


def profile_signal_handler(signum, frame):
    if state.profile_level > 0:
        now = clock()
        state.accumulate_time(now)

        timestamp = state.accumulated_time[state.timeidx]
        state.samples.append(Sample.from_frame(frame, timestamp))

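        # re-arm the one-shot ITIMER_PROF timer (a 0.0 interval means it
        # does not auto-repeat) so the next sample gets scheduled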
        signal.setitimer(signal.ITIMER_PROF, state.sample_interval, 0.0)
        state.last_start_time = now


stopthread = threading.Event()


def samplerthread(tid):
    while not stopthread.is_set():
        now = clock()
        state.accumulate_time(now)

        frame = sys._current_frames()[tid]

        timestamp = state.accumulated_time[state.timeidx]
        state.samples.append(Sample.from_frame(frame, timestamp))

        state.last_start_time = now
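        # sleep in wall-clock time; the sampled thread need not be running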
        time.sleep(state.sample_interval)

    stopthread.clear()


###########################################################################
## Profiling API


def is_active():
    return state.profile_level > 0


lastmechanism = None


def start(mechanism=b'thread', track=b'cpu'):
    '''Install the profiling signal handler, and start profiling.'''
    state.track = track  # note: nesting different mode won't work
    state.profile_level += 1
    if state.profile_level == 1:
        state.last_start_time = clock()
        rpt = state.remaining_prof_time
        state.remaining_prof_time = None

        global lastmechanism
        lastmechanism = mechanism

        if mechanism == b'signal':
            signal.signal(signal.SIGPROF, profile_signal_handler)
            signal.setitimer(
                signal.ITIMER_PROF, rpt or state.sample_interval, 0.0
            )
        elif mechanism == b'thread':
            frame = inspect.currentframe()
            tid = [k for k, f in sys._current_frames().items() if f == frame][0]
            state.thread = threading.Thread(
                target=samplerthread, args=(tid,), name="samplerthread"
            )
            state.thread.start()


def stop():
    '''Stop profiling, and uninstall the profiling signal handler.'''
    state.profile_level -= 1
    if state.profile_level == 0:
        if lastmechanism == b'signal':
            rpt = signal.setitimer(signal.ITIMER_PROF, 0.0, 0.0)
            signal.signal(signal.SIGPROF, signal.SIG_IGN)
            state.remaining_prof_time = rpt[0]
        elif lastmechanism == b'thread':
            stopthread.set()
            state.thread.join()

        state.accumulate_time(clock())
        state.last_start_time = None
        statprofpath = encoding.environ.get(b'STATPROF_DEST')
        if statprofpath:
            save_data(statprofpath)

    return state


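# On-disk format used by save_data()/load_data() and the CLI below: the
# first line holds the accumulated (cpu, real) times; every other line is
# one sample, NUL-separated into a timestamp plus stack sites, each site
# packing path, line number and function with \1 bytes.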
def save_data(path):
    with open(path, b'w+') as file:
        file.write(b"%f %f\n" % state.accumulated_time)
        for sample in state.samples:
            time = sample.time
            stack = sample.stack
            sites = [
                b'\1'.join([s.path, b'%d' % (s.lineno or -1), s.function])
                for s in stack
            ]
            file.write(b"%d\0%s\n" % (time, b'\0'.join(sites)))


def load_data(path):
    lines = open(path, b'rb').read().splitlines()

    state.accumulated_time = [float(value) for value in lines[0].split()]
    state.samples = []
    for line in lines[1:]:
        parts = line.split(b'\0')
        time = float(parts[0])
        rawsites = parts[1:]
        sites = []
        for rawsite in rawsites:
            siteparts = rawsite.split(b'\1')
            sites.append(
                CodeSite.get(siteparts[0], int(siteparts[1]), siteparts[2])
            )

        state.samples.append(Sample(sites, time))


def reset(frequency=None):
    """Clear out the state of the profiler. Do not call while the
    profiler is running.

    The optional frequency argument specifies the number of samples to
    collect per second."""
    assert state.profile_level == 0, b"Can't reset() while statprof is running"
    CodeSite.cache.clear()
    state.reset(frequency)


@contextmanager
def profile():
    start()
    try:
        yield
    finally:
        stop()
        display()


###########################################################################
## Reporting API


class SiteStats:
    def __init__(self, site):
        self.site = site
        self.selfcount = 0
        self.totalcount = 0

    def addself(self):
        self.selfcount += 1

    def addtotal(self):
        self.totalcount += 1

    def selfpercent(self):
        return self.selfcount / len(state.samples) * 100

    def totalpercent(self):
        return self.totalcount / len(state.samples) * 100

    def selfseconds(self):
        return self.selfcount * state.seconds_per_sample()

    def totalseconds(self):
        return self.totalcount * state.seconds_per_sample()

    @classmethod
    def buildstats(cls, samples):
        stats = {}

        for sample in samples:
            for i, site in enumerate(sample.stack):
                sitestat = stats.get(site)
                if not sitestat:
                    sitestat = SiteStats(site)
                    stats[site] = sitestat

                sitestat.addtotal()

                if i == 0:
                    sitestat.addself()

        return [s for s in stats.values()]


class DisplayFormats:
    ByLine = 0
    ByMethod = 1
    AboutMethod = 2
    Hotpath = 3
    FlameGraph = 4
    Json = 5
    Chrome = 6


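# format=3, the default below, is DisplayFormats.Hotpath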
def display(fp=None, format=3, data=None, **kwargs):
    '''Print statistics, either to stdout or the given file object.'''
    if data is None:
        data = state

    if fp is None:
        from .utils import procutil

        fp = procutil.stdout
    if len(data.samples) == 0:
        fp.write(b'No samples recorded.\n')
        return

    if format == DisplayFormats.ByLine:
        display_by_line(data, fp)
    elif format == DisplayFormats.ByMethod:
        display_by_method(data, fp)
    elif format == DisplayFormats.AboutMethod:
        display_about_method(data, fp, **kwargs)
    elif format == DisplayFormats.Hotpath:
        display_hotpath(data, fp, **kwargs)
    elif format == DisplayFormats.FlameGraph:
        write_to_flame(data, fp, **kwargs)
    elif format == DisplayFormats.Json:
        write_to_json(data, fp)
    elif format == DisplayFormats.Chrome:
        write_to_chrome(data, fp, **kwargs)
    else:
        raise Exception("Invalid display format")

    if format not in (DisplayFormats.Json, DisplayFormats.Chrome):
        fp.write(b'---\n')
        fp.write(b'Sample count: %d\n' % len(data.samples))
        fp.write(b'Total time: %f seconds (%f wall)\n' % data.accumulated_time)


def display_by_line(data, fp):
    """Print the profiler data with each sample line represented
    as one row in a table. Sorted by self-time per line."""
    stats = SiteStats.buildstats(data.samples)
    stats.sort(reverse=True, key=lambda x: x.selfseconds())

    fp.write(
        b'%5.5s %10.10s %7.7s %-8.8s\n'
        % (b'%  ', b'cumulative', b'self', b'')
    )
    fp.write(
        b'%5.5s %9.9s %8.8s %-8.8s\n'
        % (b"time", b"seconds", b"seconds", b"name")
    )

    for stat in stats:
        site = stat.site
        sitelabel = b'%s:%d:%s' % (
            site.filename(),
            site.lineno or -1,
            site.function,
        )
        fp.write(
            b'%6.2f %9.2f %9.2f  %s\n'
            % (
                stat.selfpercent(),
                stat.totalseconds(),
                stat.selfseconds(),
                sitelabel,
            )
        )


def display_by_method(data, fp):
    """Print the profiler data with each sample function represented
    as one row in a table. Important lines within that function are
    output as nested rows. Sorted by self-time per line."""
    fp.write(
        b'%5.5s %10.10s %7.7s %-8.8s\n'
        % (b'%  ', b'cumulative', b'self', b'')
    )
    fp.write(
        b'%5.5s %9.9s %8.8s %-8.8s\n'
        % (b"time", b"seconds", b"seconds", b"name")
    )

    stats = SiteStats.buildstats(data.samples)

    grouped = defaultdict(list)
    for stat in stats:
        grouped[stat.site.filename() + b":" + stat.site.function].append(stat)

    # compute sums for each function
    functiondata = []
    for fname, sitestats in grouped.items():
        total_cum_sec = 0
        total_self_sec = 0
        total_percent = 0
        for stat in sitestats:
            total_cum_sec += stat.totalseconds()
            total_self_sec += stat.selfseconds()
            total_percent += stat.selfpercent()

        functiondata.append(
            (fname, total_cum_sec, total_self_sec, total_percent, sitestats)
        )

    # sort by total self sec
    functiondata.sort(reverse=True, key=lambda x: x[2])

    for function in functiondata:
        if function[3] < 0.05:
            continue
        fp.write(
            b'%6.2f %9.2f %9.2f  %s\n'
            % (
                function[3],  # total percent
                function[1],  # total cum sec
                function[2],  # total self sec
                function[0],
            )
        )  # file:function

        function[4].sort(reverse=True, key=lambda i: i.selfseconds())
        for stat in function[4]:
            # only show line numbers for significant locations (>1% time spent)
            if stat.selfpercent() > 1:
                source = stat.site.getsource(25)
                if not isinstance(source, bytes):
                    source = pycompat.bytestr(source)

                stattuple = (
                    stat.selfpercent(),
                    stat.selfseconds(),
                    stat.site.lineno or -1,
                    source,
                )

                fp.write(b'%33.0f%% %6.2f   line %d: %s\n' % stattuple)


def display_about_method(data, fp, function=None, **kwargs):
    if function is None:
        raise Exception("Invalid function")

    filename = None
    if b':' in function:
        filename, function = function.split(b':')

    relevant_samples = 0
    parents = {}
    children = {}

    for sample in data.samples:
        for i, site in enumerate(sample.stack):
            if site.function == function and (
                not filename or site.filename() == filename
            ):
                relevant_samples += 1
                if i != len(sample.stack) - 1:
                    parent = sample.stack[i + 1]
                    if parent in parents:
                        parents[parent] = parents[parent] + 1
                    else:
                        parents[parent] = 1

                if site in children:
                    children[site] = children[site] + 1
                else:
                    children[site] = 1

    parents = [(parent, count) for parent, count in parents.items()]
    parents.sort(reverse=True, key=lambda x: x[1])
    for parent, count in parents:
        fp.write(
            b'%6.2f%%   %s:%s   line %s: %s\n'
            % (
                count / relevant_samples * 100,
                pycompat.fsencode(parent.filename()),
                pycompat.sysbytes(parent.function),
                parent.lineno or -1,
                pycompat.sysbytes(parent.getsource(50)),
            )
        )

    stats = SiteStats.buildstats(data.samples)
    stats = [
        s
        for s in stats
        if s.site.function == function
        and (not filename or s.site.filename() == filename)
    ]

    total_cum_sec = 0
    total_self_sec = 0
    total_self_percent = 0
    total_cum_percent = 0
    for stat in stats:
        total_cum_sec += stat.totalseconds()
        total_self_sec += stat.selfseconds()
        total_self_percent += stat.selfpercent()
        total_cum_percent += stat.totalpercent()

    fp.write(
        b'\n  %s:%s    Total: %0.2fs (%0.2f%%)    Self: %0.2fs (%0.2f%%)\n\n'
        % (
            pycompat.sysbytes(filename or b'___'),
            pycompat.sysbytes(function),
            total_cum_sec,
            total_cum_percent,
            total_self_sec,
            total_self_percent,
        )
    )

    children = [(child, count) for child, count in children.items()]
    children.sort(reverse=True, key=lambda x: x[1])
    for child, count in children:
        fp.write(
            b'        %6.2f%%   line %s: %s\n'
            % (
                count / relevant_samples * 100,
                child.lineno or -1,
                pycompat.sysbytes(child.getsource(50)),
            )
        )


def display_hotpath(data, fp, limit=0.05, **kwargs):
    class HotNode:
        def __init__(self, site):
            self.site = site
            self.count = 0
            self.children = {}

        def add(self, stack, time):
            self.count += time
            site = stack[0]
            child = self.children.get(site)
            if not child:
                child = HotNode(site)
                self.children[site] = child

            if len(stack) > 1:
                i = 1
                # Skip boiler plate parts of the stack
                while i < len(stack) and stack[i].skipname() in skips:
                    i += 1
                if i < len(stack):
                    child.add(stack[i:], time)
            else:
                # Normally this is done by the .add() calls
                child.count += time

    root = HotNode(None)
    lasttime = data.samples[0].time
    for sample in data.samples:
        root.add(sample.stack[::-1], sample.time - lasttime)
        lasttime = sample.time
    showtime = kwargs.get('showtime', True)

    def _write(node, depth, multiple_siblings):
        site = node.site
        visiblechildren = [
            c for c in node.children.values() if c.count >= (limit * root.count)
        ]
        if site:
            indent = depth * 2 - 1
            filename = (site.filename() + b':').ljust(15)
            function = site.function

            # lots of string formatting
            listpattern = (
                b''.ljust(indent)
                + (b'\\' if multiple_siblings else b'|')
                + b' %4.1f%%'
                + (b' %5.2fs' % node.count if showtime else b'')
                + b'  %s %s'
            )
            liststring = listpattern % (
                node.count / root.count * 100,
                filename,
                function,
            )
            # 4 to account for the word 'line'
            spacing_len = max(4, 55 - len(liststring))
            prefix = b''
            if spacing_len == 4:
                prefix = b', '

            codepattern = b'%s%s %d: %s%s'
            codestring = codepattern % (
                prefix,
                b'line'.rjust(spacing_len),
                site.lineno if site.lineno is not None else -1,
                b''.ljust(max(0, 4 - len(str(site.lineno)))),
                site.getsource(30),
            )

            finalstring = liststring + codestring
            childrensamples = sum([c.count for c in node.children.values()])
            # Make frames that performed more than 10% of the operation red
            if node.count - childrensamples > (0.1 * root.count):
                finalstring = b'\033[91m' + finalstring + b'\033[0m'
            # Make frames that didn't actually perform work dark grey
            elif node.count - childrensamples == 0:
                finalstring = b'\033[90m' + finalstring + b'\033[0m'
            fp.write(finalstring + b'\n')

        newdepth = depth
        if len(visiblechildren) > 1 or multiple_siblings:
            newdepth += 1

        visiblechildren.sort(reverse=True, key=lambda x: x.count)
        for child in visiblechildren:
            _write(child, newdepth, len(visiblechildren) > 1)

    if root.count > 0:
        _write(root, 0, False)


def write_to_flame(data, fp, scriptpath=None, outputfile=None, **kwargs):
    if scriptpath is None:
        scriptpath = encoding.environ[b'HOME'] + b'/flamegraph.pl'
    if not os.path.exists(scriptpath):
        fp.write(b'error: missing %s\n' % scriptpath)
        fp.write(b'get it here: https://github.com/brendangregg/FlameGraph\n')
        return

    lines = {}
    for sample in data.samples:
        sites = [s.function for s in sample.stack]
        sites.reverse()
        line = b';'.join(sites)
        if line in lines:
            lines[line] = lines[line] + 1
        else:
            lines[line] = 1

    fd, path = pycompat.mkstemp()

    with open(path, b"w+") as file:
        for line, count in lines.items():
            file.write(b"%s %d\n" % (line, count))

    if outputfile is None:
        outputfile = b'~/flamegraph.svg'

    os.system(b"perl %s %s > %s" % (scriptpath, path, outputfile))
    fp.write(b'Written to %s\n' % outputfile)


_pathcache = {}


def simplifypath(path):
    """Attempt to make the path to a Python module easier to read by
    removing whatever part of the Python search path it was found
    on."""

    if path in _pathcache:
        return _pathcache[path]
    hgpath = encoding.__file__.rsplit(os.sep, 2)[0]
    for p in [hgpath] + sys.path:
        prefix = p + os.sep
        if path.startswith(prefix):
            path = path[len(prefix) :]
            break
    _pathcache[path] = path
    return path


def write_to_json(data, fp):
    samples = []

    for sample in data.samples:
        stack = []

        for frame in sample.stack:
            stack.append(
                (
                    pycompat.sysstr(frame.path),
                    frame.lineno or -1,
                    pycompat.sysstr(frame.function),
                )
            )

        samples.append((sample.time, stack))

    data = json.dumps(samples)
    if not isinstance(data, bytes):
        data = data.encode('utf-8')

    fp.write(data)


def write_to_chrome(data, fp, minthreshold=0.005, maxthreshold=0.999):
    samples = []
    laststack = collections.deque()
    lastseen = collections.deque()

    # The Chrome tracing format allows us to use a compact stack
    # representation to save space. It's fiddly but worth it.
    # We maintain a bijection between stack and ID.
    stack2id = {}
    id2stack = []  # will eventually be rendered

    def stackid(stack):
        if not stack:
            return
        if stack in stack2id:
            return stack2id[stack]
        parent = stackid(stack[1:])
        myid = len(stack2id)
        stack2id[stack] = myid
        id2stack.append(dict(category=stack[0][0], name='%s %s' % stack[0]))
        if parent is not None:
            id2stack[-1].update(parent=parent)
        return myid

    # The sampling profiler can sample multiple times without
    # advancing the clock, potentially causing the Chrome trace viewer
    # to render single-pixel columns that we cannot zoom in on. We
    # work around this by pretending that zero-duration samples are a
    # millisecond in length.

    clamp = 0.001

    # We provide knobs that by default attempt to filter out stack
    # frames that are too noisy:
    #
    # * A few take almost all execution time. These are usually boring
    #   setup functions, giving a stack that is deep but uninformative.
    #
    # * Numerous samples take almost no time, but introduce lots of
    #   noisy, oft-deep "spines" into a rendered profile.

    blacklist = set()
    totaltime = data.samples[-1].time - data.samples[0].time
    minthreshold = totaltime * minthreshold
    maxthreshold = max(totaltime * maxthreshold, clamp)

    def poplast():
        oldsid = stackid(tuple(laststack))
        oldcat, oldfunc = laststack.popleft()
        oldtime, oldidx = lastseen.popleft()
        duration = sample.time - oldtime
        if minthreshold <= duration <= maxthreshold:
            # ensure no zero-duration events
            sampletime = max(oldtime + clamp, sample.time)
            samples.append(
                dict(
                    ph='E',
                    name=oldfunc,
                    cat=oldcat,
                    sf=oldsid,
                    ts=sampletime * 1e6,
                    pid=0,
                )
            )
        else:
            blacklist.add(oldidx)

    # Much fiddling to synthesize correctly(ish) nested begin/end
    # events given only stack snapshots.

    for sample in data.samples:
        stack = tuple(
            (
                (
                    '%s:%d'
                    % (
                        simplifypath(pycompat.sysstr(frame.path)),
                        frame.lineno or -1,
                    ),
                    pycompat.sysstr(frame.function),
                )
                for frame in sample.stack
            )
        )
        qstack = collections.deque(stack)
        if laststack == qstack:
            continue
        while laststack and qstack and laststack[-1] == qstack[-1]:
            laststack.pop()
            qstack.pop()
        while laststack:
            poplast()
        for f in reversed(qstack):
            lastseen.appendleft((sample.time, len(samples)))
            laststack.appendleft(f)
            path, name = f
            sid = stackid(tuple(laststack))
            samples.append(
                dict(
                    ph='B',
                    name=name,
                    cat=path,
                    ts=sample.time * 1e6,
                    sf=sid,
                    pid=0,
                )
            )
        laststack = collections.deque(stack)
    while laststack:
        poplast()
    events = [
        sample for idx, sample in enumerate(samples) if idx not in blacklist
    ]
    frames = collections.OrderedDict(
        (str(k), v) for (k, v) in enumerate(id2stack)
    )
    data = json.dumps(dict(traceEvents=events, stackFrames=frames), indent=1)
    if not isinstance(data, bytes):
        data = data.encode('utf-8')
    fp.write(data)
    fp.write(b'\n')


def printusage():
    print(
        r"""
The statprof command line allows you to inspect the last profile's results in
the following forms:

usage:
    hotpath [-l --limit percent]
        Shows a graph of calls with the percent of time each takes.
        Red calls take over 10%% of the total time themselves.
    lines
        Shows the actual sampled lines.
    functions
        Shows the samples grouped by function.
    function [filename:]functionname
        Shows the callers and callees of a particular function.
    flame [-s --script-path] [-o --output-file path]
        Writes out a flamegraph to output-file (defaults to ~/flamegraph.svg)
        Requires that ~/flamegraph.pl exist.
        (Specify alternate script path with --script-path.)"""
    )


def main(argv=None):
    if argv is None:
        argv = sys.argv

    if len(argv) == 1:
        printusage()
        return 0

    displayargs = {}

    optstart = 2
    displayargs[b'function'] = None
    if argv[1] == 'hotpath':
        displayargs[b'format'] = DisplayFormats.Hotpath
    elif argv[1] == 'lines':
        displayargs[b'format'] = DisplayFormats.ByLine
    elif argv[1] == 'functions':
        displayargs[b'format'] = DisplayFormats.ByMethod
    elif argv[1] == 'function':
        displayargs[b'format'] = DisplayFormats.AboutMethod
        displayargs[b'function'] = argv[2]
        optstart = 3
    elif argv[1] == 'flame':
        displayargs[b'format'] = DisplayFormats.FlameGraph
    else:
        printusage()
        return 0

    # process options
    try:
        opts, args = pycompat.getoptb(
            pycompat.sysargv[optstart:],
            b"hl:f:o:p:",
            [b"help", b"limit=", b"file=", b"output-file=", b"script-path="],
        )
    except getopt.error as msg:
        print(msg)
        printusage()
        return 2

    displayargs[b'limit'] = 0.05
    path = None
    for o, value in opts:
        if o in ("-l", "--limit"):
            displayargs[b'limit'] = float(value)
        elif o in ("-f", "--file"):
            path = value
        elif o in ("-o", "--output-file"):
            displayargs[b'outputfile'] = value
        elif o in ("-p", "--script-path"):
            displayargs[b'scriptpath'] = value
        elif o in ("-h", "--help"):
            printusage()
            return 0
        else:
            assert False, "unhandled option %s" % o

    if not path:
        print('must specify --file to load')
        return 1

    load_data(path=path)

    display(**pycompat.strkwargs(displayargs))

    return 0


if __name__ == "__main__":
    sys.exit(main())
@@ -1,1773 +1,1774
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"

[[package]]
name = "ahash"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
dependencies = [
 "cfg-if",
 "once_cell",
 "version_check",
]

[[package]]
name = "aho-corasick"
version = "0.7.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
dependencies = [
 "memchr",
]

[[package]]
name = "android-tzdata"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"

[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
 "libc",
]

[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
 "hermit-abi",
 "libc",
 "winapi",
]

[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

[[package]]
name = "bitflags"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"

[[package]]
name = "bitmaps"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
dependencies = [
 "typenum",
]

[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = [
 "funty",
 "radium",
 "tap",
 "wyz",
]

[[package]]
name = "block-buffer"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
dependencies = [
 "generic-array",
]

[[package]]
name = "block-buffer"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
 "generic-array",
]

[[package]]
name = "bstr"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
dependencies = [
 "memchr",
 "regex-automata",
 "serde",
]

[[package]]
name = "bumpalo"
version = "3.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"

[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"

[[package]]
name = "bytes-cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a20de93b91d7703ca0e39e12930e310acec5ff4d715f4166e0ab026babb352e8"
dependencies = [
 "bytes-cast-derive",
]

[[package]]
name = "bytes-cast-derive"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7470a6fcce58cde3d62cce758bf71007978b75247e6becd9255c9b884bcb4f71"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
157 [[package]]
157 [[package]]
158 name = "cc"
158 name = "cc"
159 version = "1.0.76"
159 version = "1.0.76"
160 source = "registry+https://github.com/rust-lang/crates.io-index"
160 source = "registry+https://github.com/rust-lang/crates.io-index"
161 checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
161 checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
162 dependencies = [
162 dependencies = [
163 "jobserver",
163 "jobserver",
164 ]
164 ]
165
165
166 [[package]]
166 [[package]]
167 name = "cfg-if"
167 name = "cfg-if"
168 version = "1.0.0"
168 version = "1.0.0"
169 source = "registry+https://github.com/rust-lang/crates.io-index"
169 source = "registry+https://github.com/rust-lang/crates.io-index"
170 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
170 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
171
171
172 [[package]]
172 [[package]]
173 name = "chrono"
173 name = "chrono"
174 version = "0.4.34"
174 version = "0.4.34"
175 source = "registry+https://github.com/rust-lang/crates.io-index"
175 source = "registry+https://github.com/rust-lang/crates.io-index"
176 checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
176 checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
177 dependencies = [
177 dependencies = [
178 "android-tzdata",
178 "android-tzdata",
179 "iana-time-zone",
179 "iana-time-zone",
180 "js-sys",
180 "js-sys",
181 "num-traits",
181 "num-traits",
182 "wasm-bindgen",
182 "wasm-bindgen",
183 "windows-targets 0.52.0",
183 "windows-targets 0.52.0",
184 ]
184 ]
185
185
186 [[package]]
186 [[package]]
187 name = "clap"
187 name = "clap"
188 version = "4.0.24"
188 version = "4.0.24"
189 source = "registry+https://github.com/rust-lang/crates.io-index"
189 source = "registry+https://github.com/rust-lang/crates.io-index"
190 checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
190 checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
191 dependencies = [
191 dependencies = [
192 "atty",
192 "atty",
193 "bitflags 1.3.2",
193 "bitflags 1.3.2",
194 "clap_derive",
194 "clap_derive",
195 "clap_lex",
195 "clap_lex",
196 "once_cell",
196 "once_cell",
197 "strsim",
197 "strsim",
198 "termcolor",
198 "termcolor",
199 ]
199 ]
200
200
201 [[package]]
201 [[package]]
202 name = "clap_derive"
202 name = "clap_derive"
203 version = "4.0.21"
203 version = "4.0.21"
204 source = "registry+https://github.com/rust-lang/crates.io-index"
204 source = "registry+https://github.com/rust-lang/crates.io-index"
205 checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
205 checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
206 dependencies = [
206 dependencies = [
207 "heck",
207 "heck",
208 "proc-macro-error",
208 "proc-macro-error",
209 "proc-macro2",
209 "proc-macro2",
210 "quote",
210 "quote",
211 "syn",
211 "syn",
212 ]
212 ]
213
213
214 [[package]]
214 [[package]]
215 name = "clap_lex"
215 name = "clap_lex"
216 version = "0.3.0"
216 version = "0.3.0"
217 source = "registry+https://github.com/rust-lang/crates.io-index"
217 source = "registry+https://github.com/rust-lang/crates.io-index"
218 checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
218 checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
219 dependencies = [
219 dependencies = [
220 "os_str_bytes",
220 "os_str_bytes",
221 ]
221 ]
222
222
223 [[package]]
223 [[package]]
224 name = "codespan-reporting"
224 name = "codespan-reporting"
225 version = "0.11.1"
225 version = "0.11.1"
226 source = "registry+https://github.com/rust-lang/crates.io-index"
226 source = "registry+https://github.com/rust-lang/crates.io-index"
227 checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
227 checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
228 dependencies = [
228 dependencies = [
229 "termcolor",
229 "termcolor",
230 "unicode-width",
230 "unicode-width",
231 ]
231 ]
232
232
233 [[package]]
233 [[package]]
234 name = "convert_case"
234 name = "convert_case"
235 version = "0.4.0"
235 version = "0.4.0"
236 source = "registry+https://github.com/rust-lang/crates.io-index"
236 source = "registry+https://github.com/rust-lang/crates.io-index"
237 checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
237 checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
238
238
239 [[package]]
239 [[package]]
240 name = "core-foundation-sys"
240 name = "core-foundation-sys"
241 version = "0.8.3"
241 version = "0.8.3"
242 source = "registry+https://github.com/rust-lang/crates.io-index"
242 source = "registry+https://github.com/rust-lang/crates.io-index"
243 checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
243 checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
244
244
245 [[package]]
245 [[package]]
246 name = "cpufeatures"
246 name = "cpufeatures"
247 version = "0.2.5"
247 version = "0.2.5"
248 source = "registry+https://github.com/rust-lang/crates.io-index"
248 source = "registry+https://github.com/rust-lang/crates.io-index"
249 checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
249 checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
250 dependencies = [
250 dependencies = [
251 "libc",
251 "libc",
252 ]
252 ]
253
253
254 [[package]]
254 [[package]]
255 name = "cpython"
255 name = "cpython"
256 version = "0.7.2"
256 version = "0.7.2"
257 source = "registry+https://github.com/rust-lang/crates.io-index"
257 source = "registry+https://github.com/rust-lang/crates.io-index"
258 checksum = "43b398a2c65baaf5892f10bb69b52508bf7a993380cc4ecd3785aaebb5c79389"
258 checksum = "43b398a2c65baaf5892f10bb69b52508bf7a993380cc4ecd3785aaebb5c79389"
259 dependencies = [
259 dependencies = [
260 "libc",
260 "libc",
261 "num-traits",
261 "num-traits",
262 "paste",
262 "paste",
263 "python3-sys",
263 "python3-sys",
264 ]
264 ]
265
265
266 [[package]]
266 [[package]]
267 name = "crc32fast"
267 name = "crc32fast"
268 version = "1.3.2"
268 version = "1.3.2"
269 source = "registry+https://github.com/rust-lang/crates.io-index"
269 source = "registry+https://github.com/rust-lang/crates.io-index"
270 checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
270 checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
271 dependencies = [
271 dependencies = [
272 "cfg-if",
272 "cfg-if",
273 ]
273 ]
274
274
275 [[package]]
275 [[package]]
276 name = "crossbeam-channel"
276 name = "crossbeam-channel"
277 version = "0.5.6"
277 version = "0.5.6"
278 source = "registry+https://github.com/rust-lang/crates.io-index"
278 source = "registry+https://github.com/rust-lang/crates.io-index"
279 checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
279 checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
280 dependencies = [
280 dependencies = [
281 "cfg-if",
281 "cfg-if",
282 "crossbeam-utils",
282 "crossbeam-utils",
283 ]
283 ]
284
284
285 [[package]]
285 [[package]]
286 name = "crossbeam-deque"
286 name = "crossbeam-deque"
287 version = "0.8.2"
287 version = "0.8.2"
288 source = "registry+https://github.com/rust-lang/crates.io-index"
288 source = "registry+https://github.com/rust-lang/crates.io-index"
289 checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
289 checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
290 dependencies = [
290 dependencies = [
291 "cfg-if",
291 "cfg-if",
292 "crossbeam-epoch",
292 "crossbeam-epoch",
293 "crossbeam-utils",
293 "crossbeam-utils",
294 ]
294 ]
295
295
296 [[package]]
296 [[package]]
297 name = "crossbeam-epoch"
297 name = "crossbeam-epoch"
298 version = "0.9.11"
298 version = "0.9.11"
299 source = "registry+https://github.com/rust-lang/crates.io-index"
299 source = "registry+https://github.com/rust-lang/crates.io-index"
300 checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
300 checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
301 dependencies = [
301 dependencies = [
302 "autocfg",
302 "autocfg",
303 "cfg-if",
303 "cfg-if",
304 "crossbeam-utils",
304 "crossbeam-utils",
305 "memoffset",
305 "memoffset",
306 "scopeguard",
306 "scopeguard",
307 ]
307 ]
308
308
309 [[package]]
309 [[package]]
310 name = "crossbeam-utils"
310 name = "crossbeam-utils"
311 version = "0.8.12"
311 version = "0.8.12"
312 source = "registry+https://github.com/rust-lang/crates.io-index"
312 source = "registry+https://github.com/rust-lang/crates.io-index"
313 checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
313 checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
314 dependencies = [
314 dependencies = [
315 "cfg-if",
315 "cfg-if",
316 ]
316 ]
317
317
318 [[package]]
318 [[package]]
319 name = "crypto-common"
319 name = "crypto-common"
320 version = "0.1.6"
320 version = "0.1.6"
321 source = "registry+https://github.com/rust-lang/crates.io-index"
321 source = "registry+https://github.com/rust-lang/crates.io-index"
322 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
322 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
323 dependencies = [
323 dependencies = [
324 "generic-array",
324 "generic-array",
325 "typenum",
325 "typenum",
326 ]
326 ]
327
327
328 [[package]]
328 [[package]]
329 name = "ctor"
329 name = "ctor"
330 version = "0.1.26"
330 version = "0.1.26"
331 source = "registry+https://github.com/rust-lang/crates.io-index"
331 source = "registry+https://github.com/rust-lang/crates.io-index"
332 checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
332 checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
333 dependencies = [
333 dependencies = [
334 "quote",
334 "quote",
335 "syn",
335 "syn",
336 ]
336 ]
337
337
338 [[package]]
338 [[package]]
339 name = "cxx"
339 name = "cxx"
340 version = "1.0.81"
340 version = "1.0.81"
341 source = "registry+https://github.com/rust-lang/crates.io-index"
341 source = "registry+https://github.com/rust-lang/crates.io-index"
342 checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
342 checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
343 dependencies = [
343 dependencies = [
344 "cc",
344 "cc",
345 "cxxbridge-flags",
345 "cxxbridge-flags",
346 "cxxbridge-macro",
346 "cxxbridge-macro",
347 "link-cplusplus",
347 "link-cplusplus",
348 ]
348 ]
349
349
350 [[package]]
350 [[package]]
351 name = "cxx-build"
351 name = "cxx-build"
352 version = "1.0.81"
352 version = "1.0.81"
353 source = "registry+https://github.com/rust-lang/crates.io-index"
353 source = "registry+https://github.com/rust-lang/crates.io-index"
354 checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
354 checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
355 dependencies = [
355 dependencies = [
356 "cc",
356 "cc",
357 "codespan-reporting",
357 "codespan-reporting",
358 "once_cell",
358 "once_cell",
359 "proc-macro2",
359 "proc-macro2",
360 "quote",
360 "quote",
361 "scratch",
361 "scratch",
362 "syn",
362 "syn",
363 ]
363 ]
364
364
365 [[package]]
365 [[package]]
366 name = "cxxbridge-flags"
366 name = "cxxbridge-flags"
367 version = "1.0.81"
367 version = "1.0.81"
368 source = "registry+https://github.com/rust-lang/crates.io-index"
368 source = "registry+https://github.com/rust-lang/crates.io-index"
369 checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
369 checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
370
370
371 [[package]]
371 [[package]]
372 name = "cxxbridge-macro"
372 name = "cxxbridge-macro"
373 version = "1.0.81"
373 version = "1.0.81"
374 source = "registry+https://github.com/rust-lang/crates.io-index"
374 source = "registry+https://github.com/rust-lang/crates.io-index"
375 checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
375 checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
376 dependencies = [
376 dependencies = [
377 "proc-macro2",
377 "proc-macro2",
378 "quote",
378 "quote",
379 "syn",
379 "syn",
380 ]
380 ]
381
381
382 [[package]]
382 [[package]]
383 name = "derive_more"
383 name = "derive_more"
384 version = "0.99.17"
384 version = "0.99.17"
385 source = "registry+https://github.com/rust-lang/crates.io-index"
385 source = "registry+https://github.com/rust-lang/crates.io-index"
386 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
386 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
387 dependencies = [
387 dependencies = [
388 "convert_case",
388 "convert_case",
389 "proc-macro2",
389 "proc-macro2",
390 "quote",
390 "quote",
391 "rustc_version",
391 "rustc_version",
392 "syn",
392 "syn",
393 ]
393 ]
394
394
395 [[package]]
395 [[package]]
396 name = "diff"
396 name = "diff"
397 version = "0.1.13"
397 version = "0.1.13"
398 source = "registry+https://github.com/rust-lang/crates.io-index"
398 source = "registry+https://github.com/rust-lang/crates.io-index"
399 checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
399 checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
400
400
401 [[package]]
401 [[package]]
402 name = "digest"
402 name = "digest"
403 version = "0.9.0"
403 version = "0.9.0"
404 source = "registry+https://github.com/rust-lang/crates.io-index"
404 source = "registry+https://github.com/rust-lang/crates.io-index"
405 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
405 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
406 dependencies = [
406 dependencies = [
407 "generic-array",
407 "generic-array",
408 ]
408 ]
409
409
410 [[package]]
410 [[package]]
411 name = "digest"
411 name = "digest"
412 version = "0.10.5"
412 version = "0.10.5"
413 source = "registry+https://github.com/rust-lang/crates.io-index"
413 source = "registry+https://github.com/rust-lang/crates.io-index"
414 checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
414 checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
415 dependencies = [
415 dependencies = [
416 "block-buffer 0.10.3",
416 "block-buffer 0.10.3",
417 "crypto-common",
417 "crypto-common",
418 ]
418 ]
419
419
420 [[package]]
420 [[package]]
421 name = "dirs"
421 name = "dirs"
422 version = "5.0.1"
422 version = "5.0.1"
423 source = "registry+https://github.com/rust-lang/crates.io-index"
423 source = "registry+https://github.com/rust-lang/crates.io-index"
424 checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
424 checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
425 dependencies = [
425 dependencies = [
426 "dirs-sys",
426 "dirs-sys",
427 ]
427 ]
428
428
429 [[package]]
429 [[package]]
430 name = "dirs-sys"
430 name = "dirs-sys"
431 version = "0.4.1"
431 version = "0.4.1"
432 source = "registry+https://github.com/rust-lang/crates.io-index"
432 source = "registry+https://github.com/rust-lang/crates.io-index"
433 checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
433 checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
434 dependencies = [
434 dependencies = [
435 "libc",
435 "libc",
436 "option-ext",
436 "option-ext",
437 "redox_users",
437 "redox_users",
438 "windows-sys",
438 "windows-sys",
439 ]
439 ]
440
440
441 [[package]]
441 [[package]]
442 name = "either"
442 name = "either"
443 version = "1.8.0"
443 version = "1.8.0"
444 source = "registry+https://github.com/rust-lang/crates.io-index"
444 source = "registry+https://github.com/rust-lang/crates.io-index"
445 checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
445 checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
446
446
447 [[package]]
447 [[package]]
448 name = "env_logger"
448 name = "env_logger"
449 version = "0.9.3"
449 version = "0.9.3"
450 source = "registry+https://github.com/rust-lang/crates.io-index"
450 source = "registry+https://github.com/rust-lang/crates.io-index"
451 checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
451 checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
452 dependencies = [
452 dependencies = [
453 "atty",
453 "atty",
454 "humantime",
454 "humantime",
455 "log",
455 "log",
456 "regex",
456 "regex",
457 "termcolor",
457 "termcolor",
458 ]
458 ]
459
459
460 [[package]]
460 [[package]]
461 name = "fastrand"
461 name = "fastrand"
462 version = "1.8.0"
462 version = "1.8.0"
463 source = "registry+https://github.com/rust-lang/crates.io-index"
463 source = "registry+https://github.com/rust-lang/crates.io-index"
464 checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
464 checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
465 dependencies = [
465 dependencies = [
466 "instant",
466 "instant",
467 ]
467 ]
468
468
469 [[package]]
469 [[package]]
470 name = "flate2"
470 name = "flate2"
471 version = "1.0.24"
471 version = "1.0.24"
472 source = "registry+https://github.com/rust-lang/crates.io-index"
472 source = "registry+https://github.com/rust-lang/crates.io-index"
473 checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
473 checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
474 dependencies = [
474 dependencies = [
475 "crc32fast",
475 "crc32fast",
476 "libz-sys",
476 "libz-sys",
477 "miniz_oxide",
477 "miniz_oxide",
478 ]
478 ]
479
479
480 [[package]]
480 [[package]]
481 name = "format-bytes"
481 name = "format-bytes"
482 version = "0.3.0"
482 version = "0.3.0"
483 source = "registry+https://github.com/rust-lang/crates.io-index"
483 source = "registry+https://github.com/rust-lang/crates.io-index"
484 checksum = "48942366ef93975da38e175ac9e10068c6fc08ca9e85930d4f098f4d5b14c2fd"
484 checksum = "48942366ef93975da38e175ac9e10068c6fc08ca9e85930d4f098f4d5b14c2fd"
485 dependencies = [
485 dependencies = [
486 "format-bytes-macros",
486 "format-bytes-macros",
487 ]
487 ]
488
488
489 [[package]]
489 [[package]]
490 name = "format-bytes-macros"
490 name = "format-bytes-macros"
491 version = "0.4.0"
491 version = "0.4.0"
492 source = "registry+https://github.com/rust-lang/crates.io-index"
492 source = "registry+https://github.com/rust-lang/crates.io-index"
493 checksum = "203aadebefcc73d12038296c228eabf830f99cba991b0032adf20e9fa6ce7e4f"
493 checksum = "203aadebefcc73d12038296c228eabf830f99cba991b0032adf20e9fa6ce7e4f"
494 dependencies = [
494 dependencies = [
495 "proc-macro2",
495 "proc-macro2",
496 "quote",
496 "quote",
497 "syn",
497 "syn",
498 ]
498 ]
499
499
500 [[package]]
500 [[package]]
501 name = "funty"
501 name = "funty"
502 version = "2.0.0"
502 version = "2.0.0"
503 source = "registry+https://github.com/rust-lang/crates.io-index"
503 source = "registry+https://github.com/rust-lang/crates.io-index"
504 checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
504 checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
505
505
506 [[package]]
506 [[package]]
507 name = "generic-array"
507 name = "generic-array"
508 version = "0.14.6"
508 version = "0.14.6"
509 source = "registry+https://github.com/rust-lang/crates.io-index"
509 source = "registry+https://github.com/rust-lang/crates.io-index"
510 checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
510 checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
511 dependencies = [
511 dependencies = [
512 "typenum",
512 "typenum",
513 "version_check",
513 "version_check",
514 ]
514 ]
515
515
516 [[package]]
516 [[package]]
517 name = "getrandom"
517 name = "getrandom"
518 version = "0.1.16"
518 version = "0.1.16"
519 source = "registry+https://github.com/rust-lang/crates.io-index"
519 source = "registry+https://github.com/rust-lang/crates.io-index"
520 checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
520 checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
521 dependencies = [
521 dependencies = [
522 "cfg-if",
522 "cfg-if",
523 "libc",
523 "libc",
524 "wasi 0.9.0+wasi-snapshot-preview1",
524 "wasi 0.9.0+wasi-snapshot-preview1",
525 ]
525 ]
526
526
527 [[package]]
527 [[package]]
528 name = "getrandom"
528 name = "getrandom"
529 version = "0.2.8"
529 version = "0.2.8"
530 source = "registry+https://github.com/rust-lang/crates.io-index"
530 source = "registry+https://github.com/rust-lang/crates.io-index"
531 checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
531 checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
532 dependencies = [
532 dependencies = [
533 "cfg-if",
533 "cfg-if",
534 "libc",
534 "libc",
535 "wasi 0.11.0+wasi-snapshot-preview1",
535 "wasi 0.11.0+wasi-snapshot-preview1",
536 ]
536 ]
537
537
538 [[package]]
538 [[package]]
539 name = "hashbrown"
539 name = "hashbrown"
540 version = "0.12.3"
540 version = "0.12.3"
541 source = "registry+https://github.com/rust-lang/crates.io-index"
541 source = "registry+https://github.com/rust-lang/crates.io-index"
542 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
542 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
543
543
544 [[package]]
544 [[package]]
545 name = "hashbrown"
545 name = "hashbrown"
546 version = "0.13.1"
546 version = "0.13.1"
547 source = "registry+https://github.com/rust-lang/crates.io-index"
547 source = "registry+https://github.com/rust-lang/crates.io-index"
548 checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
548 checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
549 dependencies = [
549 dependencies = [
550 "ahash",
550 "ahash",
551 "rayon",
551 "rayon",
552 ]
552 ]
553
553
554 [[package]]
554 [[package]]
555 name = "heck"
555 name = "heck"
556 version = "0.4.0"
556 version = "0.4.0"
557 source = "registry+https://github.com/rust-lang/crates.io-index"
557 source = "registry+https://github.com/rust-lang/crates.io-index"
558 checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
558 checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
559
559
560 [[package]]
560 [[package]]
561 name = "hermit-abi"
561 name = "hermit-abi"
562 version = "0.1.19"
562 version = "0.1.19"
563 source = "registry+https://github.com/rust-lang/crates.io-index"
563 source = "registry+https://github.com/rust-lang/crates.io-index"
564 checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
564 checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
565 dependencies = [
565 dependencies = [
566 "libc",
566 "libc",
567 ]
567 ]
568
568
569 [[package]]
569 [[package]]
570 name = "hex"
570 name = "hex"
571 version = "0.4.3"
571 version = "0.4.3"
572 source = "registry+https://github.com/rust-lang/crates.io-index"
572 source = "registry+https://github.com/rust-lang/crates.io-index"
573 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
573 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
574
574
575 [[package]]
575 [[package]]
576 name = "hg-core"
576 name = "hg-core"
577 version = "0.1.0"
577 version = "0.1.0"
578 dependencies = [
578 dependencies = [
579 "bitflags 1.3.2",
579 "bitflags 1.3.2",
580 "bitvec",
580 "bitvec",
581 "byteorder",
581 "byteorder",
582 "bytes-cast",
582 "bytes-cast",
583 "chrono",
583 "chrono",
584 "clap",
584 "clap",
585 "crossbeam-channel",
585 "crossbeam-channel",
586 "derive_more",
586 "derive_more",
587 "flate2",
587 "flate2",
588 "format-bytes",
588 "format-bytes",
589 "hashbrown 0.13.1",
589 "hashbrown 0.13.1",
590 "home",
590 "home",
591 "im-rc",
591 "im-rc",
592 "itertools",
592 "itertools",
593 "lazy_static",
593 "lazy_static",
594 "libc",
594 "libc",
595 "log",
595 "log",
596 "logging_timer",
596 "logging_timer",
597 "memmap2",
597 "memmap2",
598 "once_cell",
598 "once_cell",
599 "pretty_assertions",
599 "pretty_assertions",
600 "rand 0.8.5",
600 "rand 0.8.5",
601 "rand_distr",
601 "rand_distr",
602 "rand_pcg",
602 "rand_pcg",
603 "rayon",
603 "rayon",
604 "regex",
604 "regex",
605 "same-file",
605 "same-file",
606 "self_cell",
606 "self_cell",
607 "serde",
607 "serde",
608 "sha-1 0.10.0",
608 "sha-1 0.10.0",
609 "tempfile",
609 "tempfile",
610 "thread_local",
610 "thread_local",
611 "toml",
611 "toml",
612 "twox-hash",
612 "twox-hash",
613 "zstd",
613 "zstd",
614 ]
614 ]
615
615
616 [[package]]
616 [[package]]
617 name = "hg-cpython"
617 name = "hg-cpython"
618 version = "0.1.0"
618 version = "0.1.0"
619 dependencies = [
619 dependencies = [
620 "cpython",
620 "cpython",
621 "crossbeam-channel",
621 "crossbeam-channel",
622 "env_logger",
622 "env_logger",
623 "hg-core",
623 "hg-core",
624 "libc",
624 "libc",
625 "log",
625 "log",
626 "stable_deref_trait",
626 "stable_deref_trait",
627 "vcsgraph",
627 "vcsgraph",
628 ]
628 ]
629
629
630 [[package]]
630 [[package]]
631 name = "home"
631 name = "home"
632 version = "0.5.4"
632 version = "0.5.4"
633 source = "registry+https://github.com/rust-lang/crates.io-index"
633 source = "registry+https://github.com/rust-lang/crates.io-index"
634 checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
634 checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
635 dependencies = [
635 dependencies = [
636 "winapi",
636 "winapi",
637 ]
637 ]
638
638
639 [[package]]
639 [[package]]
640 name = "humantime"
640 name = "humantime"
641 version = "2.1.0"
641 version = "2.1.0"
642 source = "registry+https://github.com/rust-lang/crates.io-index"
642 source = "registry+https://github.com/rust-lang/crates.io-index"
643 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
643 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
644
644
645 [[package]]
645 [[package]]
646 name = "iana-time-zone"
646 name = "iana-time-zone"
647 version = "0.1.53"
647 version = "0.1.53"
648 source = "registry+https://github.com/rust-lang/crates.io-index"
648 source = "registry+https://github.com/rust-lang/crates.io-index"
649 checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
649 checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
650 dependencies = [
650 dependencies = [
651 "android_system_properties",
651 "android_system_properties",
652 "core-foundation-sys",
652 "core-foundation-sys",
653 "iana-time-zone-haiku",
653 "iana-time-zone-haiku",
654 "js-sys",
654 "js-sys",
655 "wasm-bindgen",
655 "wasm-bindgen",
656 "winapi",
656 "winapi",
657 ]
657 ]
658
658
659 [[package]]
659 [[package]]
660 name = "iana-time-zone-haiku"
660 name = "iana-time-zone-haiku"
661 version = "0.1.1"
661 version = "0.1.1"
662 source = "registry+https://github.com/rust-lang/crates.io-index"
662 source = "registry+https://github.com/rust-lang/crates.io-index"
663 checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
663 checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
664 dependencies = [
664 dependencies = [
665 "cxx",
665 "cxx",
666 "cxx-build",
666 "cxx-build",
667 ]
667 ]
668
668
669 [[package]]
669 [[package]]
670 name = "im-rc"
670 name = "im-rc"
671 version = "15.1.0"
671 version = "15.1.0"
672 source = "registry+https://github.com/rust-lang/crates.io-index"
672 source = "registry+https://github.com/rust-lang/crates.io-index"
673 checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
673 checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
674 dependencies = [
674 dependencies = [
675 "bitmaps",
675 "bitmaps",
676 "rand_core 0.6.4",
676 "rand_core 0.6.4",
677 "rand_xoshiro",
677 "rand_xoshiro",
678 "sized-chunks",
678 "sized-chunks",
679 "typenum",
679 "typenum",
680 "version_check",
680 "version_check",
681 ]
681 ]
682
682
683 [[package]]
683 [[package]]
684 name = "indexmap"
684 name = "indexmap"
685 version = "1.9.2"
685 version = "1.9.2"
686 source = "registry+https://github.com/rust-lang/crates.io-index"
686 source = "registry+https://github.com/rust-lang/crates.io-index"
687 checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
687 checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
688 dependencies = [
688 dependencies = [
689 "autocfg",
689 "autocfg",
690 "hashbrown 0.12.3",
690 "hashbrown 0.12.3",
691 ]
691 ]
692
692
693 [[package]]
693 [[package]]
694 name = "instant"
694 name = "instant"
695 version = "0.1.12"
695 version = "0.1.12"
696 source = "registry+https://github.com/rust-lang/crates.io-index"
696 source = "registry+https://github.com/rust-lang/crates.io-index"
697 checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
697 checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
698 dependencies = [
698 dependencies = [
699 "cfg-if",
699 "cfg-if",
700 ]
700 ]
701
701
702 [[package]]
702 [[package]]
703 name = "itertools"
703 name = "itertools"
704 version = "0.10.5"
704 version = "0.10.5"
705 source = "registry+https://github.com/rust-lang/crates.io-index"
705 source = "registry+https://github.com/rust-lang/crates.io-index"
706 checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
706 checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
707 dependencies = [
707 dependencies = [
708 "either",
708 "either",
709 ]
709 ]
710
710
711 [[package]]
711 [[package]]
712 name = "jobserver"
712 name = "jobserver"
713 version = "0.1.25"
713 version = "0.1.25"
714 source = "registry+https://github.com/rust-lang/crates.io-index"
714 source = "registry+https://github.com/rust-lang/crates.io-index"
715 checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
715 checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
716 dependencies = [
716 dependencies = [
717 "libc",
717 "libc",
718 ]
718 ]
719
719
720 [[package]]
720 [[package]]
721 name = "js-sys"
721 name = "js-sys"
722 version = "0.3.60"
722 version = "0.3.60"
723 source = "registry+https://github.com/rust-lang/crates.io-index"
723 source = "registry+https://github.com/rust-lang/crates.io-index"
724 checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
724 checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
725 dependencies = [
725 dependencies = [
726 "wasm-bindgen",
726 "wasm-bindgen",
727 ]
727 ]
728
728
729 [[package]]
729 [[package]]
730 name = "lazy_static"
730 name = "lazy_static"
731 version = "1.4.0"
731 version = "1.4.0"
732 source = "registry+https://github.com/rust-lang/crates.io-index"
732 source = "registry+https://github.com/rust-lang/crates.io-index"
733 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
733 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
734
734
735 [[package]]
735 [[package]]
736 name = "libc"
736 name = "libc"
version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"

[[package]]
name = "libm"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"

[[package]]
name = "libredox"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
 "bitflags 2.6.0",
 "libc",
]

[[package]]
name = "libz-sys"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
dependencies = [
 "cc",
 "pkg-config",
 "vcpkg",
]

[[package]]
name = "link-cplusplus"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
dependencies = [
 "cc",
]

[[package]]
name = "log"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
 "cfg-if",
]

[[package]]
name = "logging_timer"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e96f261d684b7089aa576bb74e823241dccd994b27d30fabf1dcb3af284fe9"
dependencies = [
 "log",
 "logging_timer_proc_macros",
]

[[package]]
name = "logging_timer_proc_macros"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81"
dependencies = [
 "log",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"

[[package]]
name = "memmap2"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
dependencies = [
 "libc",
 "stable_deref_trait",
]

[[package]]
name = "memoffset"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
 "autocfg",
]

[[package]]
name = "miniz_oxide"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
dependencies = [
 "adler",
]

[[package]]
name = "nom8"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8"
dependencies = [
 "memchr",
]

[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
 "autocfg",
 "libm",
]

[[package]]
name = "num_cpus"
version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
dependencies = [
 "hermit-abi",
 "libc",
]

[[package]]
name = "once_cell"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"

[[package]]
name = "opaque-debug"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"

[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"

[[package]]
name = "os_str_bytes"
version = "6.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b5bf27447411e9ee3ff51186bf7a08e16c341efdde93f4d823e8844429bed7e"
dependencies = [
 "memchr",
]

[[package]]
name = "output_vt100"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
dependencies = [
 "winapi",
]

[[package]]
name = "paste"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"

[[package]]
name = "pkg-config"
version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"

[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"

[[package]]
name = "pretty_assertions"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
dependencies = [
 "ctor",
 "diff",
 "output_vt100",
 "yansi",
]

[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
 "proc-macro-error-attr",
 "proc-macro2",
 "quote",
 "syn",
 "version_check",
]

[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
 "proc-macro2",
 "quote",
 "version_check",
]

[[package]]
name = "proc-macro2"
version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
dependencies = [
 "unicode-ident",
]

[[package]]
name = "python3-sys"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f53ef6740367a09718d2cd21ba15b0d7972342a38e554736bcee7773e45c9f5"
dependencies = [
 "libc",
 "regex",
]

[[package]]
name = "quote"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
 "proc-macro2",
]

[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"

[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
 "getrandom 0.1.16",
 "libc",
 "rand_chacha 0.2.2",
 "rand_core 0.5.1",
 "rand_hc",
]

[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
 "libc",
 "rand_chacha 0.3.1",
 "rand_core 0.6.4",
]

[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
 "ppv-lite86",
 "rand_core 0.5.1",
]

[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
 "ppv-lite86",
 "rand_core 0.6.4",
]

[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
 "getrandom 0.1.16",
]

[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
 "getrandom 0.2.8",
]

[[package]]
name = "rand_distr"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31"
dependencies = [
 "num-traits",
 "rand 0.8.5",
]

[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
 "rand_core 0.5.1",
]

[[package]]
name = "rand_pcg"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e"
dependencies = [
 "rand_core 0.6.4",
]

[[package]]
name = "rand_xoshiro"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
 "rand_core 0.6.4",
]

[[package]]
name = "rayon"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
dependencies = [
 "either",
 "rayon-core",
]

[[package]]
name = "rayon-core"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
dependencies = [
 "crossbeam-channel",
 "crossbeam-deque",
 "crossbeam-utils",
 "num_cpus",
]

[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
 "bitflags 1.3.2",
]

[[package]]
name = "redox_users"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
dependencies = [
 "getrandom 0.2.8",
 "libredox",
 "thiserror",
]

[[package]]
name = "regex"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-syntax",
]

[[package]]
name = "regex-automata"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"

[[package]]
name = "regex-syntax"
version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"

[[package]]
name = "remove_dir_all"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
1163 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
1163 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
1164 dependencies = [
1164 dependencies = [
1165 "winapi",
1165 "winapi",
1166 ]
1166 ]
1167
1167
1168 [[package]]
1168 [[package]]
1169 name = "rhg"
1169 name = "rhg"
1170 version = "0.1.0"
1170 version = "0.1.0"
1171 dependencies = [
1171 dependencies = [
1172 "atty",
1172 "atty",
1173 "chrono",
1173 "chrono",
1174 "clap",
1174 "clap",
1175 "derive_more",
1175 "derive_more",
1176 "env_logger",
1176 "env_logger",
1177 "format-bytes",
1177 "format-bytes",
1178 "hg-core",
1178 "hg-core",
1179 "home",
1179 "home",
1180 "lazy_static",
1180 "lazy_static",
1181 "libc",
1181 "log",
1182 "log",
1182 "logging_timer",
1183 "logging_timer",
1183 "rayon",
1184 "rayon",
1184 "regex",
1185 "regex",
1185 "shellexpand",
1186 "shellexpand",
1186 "which",
1187 "which",
1187 "whoami",
1188 "whoami",
1188 ]
1189 ]
1189
1190
1190 [[package]]
1191 [[package]]
1191 name = "rustc_version"
1192 name = "rustc_version"
1192 version = "0.4.0"
1193 version = "0.4.0"
1193 source = "registry+https://github.com/rust-lang/crates.io-index"
1194 source = "registry+https://github.com/rust-lang/crates.io-index"
1194 checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
1195 checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
1195 dependencies = [
1196 dependencies = [
1196 "semver",
1197 "semver",
1197 ]
1198 ]
1198
1199
1199 [[package]]
1200 [[package]]
1200 name = "same-file"
1201 name = "same-file"
1201 version = "1.0.6"
1202 version = "1.0.6"
1202 source = "registry+https://github.com/rust-lang/crates.io-index"
1203 source = "registry+https://github.com/rust-lang/crates.io-index"
1203 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
1204 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
1204 dependencies = [
1205 dependencies = [
1205 "winapi-util",
1206 "winapi-util",
1206 ]
1207 ]
1207
1208
1208 [[package]]
1209 [[package]]
1209 name = "scopeguard"
1210 name = "scopeguard"
1210 version = "1.1.0"
1211 version = "1.1.0"
1211 source = "registry+https://github.com/rust-lang/crates.io-index"
1212 source = "registry+https://github.com/rust-lang/crates.io-index"
1212 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
1213 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
1213
1214
1214 [[package]]
1215 [[package]]
1215 name = "scratch"
1216 name = "scratch"
1216 version = "1.0.2"
1217 version = "1.0.2"
1217 source = "registry+https://github.com/rust-lang/crates.io-index"
1218 source = "registry+https://github.com/rust-lang/crates.io-index"
1218 checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
1219 checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
1219
1220
1220 [[package]]
1221 [[package]]
1221 name = "self_cell"
1222 name = "self_cell"
1222 version = "1.0.0"
1223 version = "1.0.0"
1223 source = "registry+https://github.com/rust-lang/crates.io-index"
1224 source = "registry+https://github.com/rust-lang/crates.io-index"
1224 checksum = "4a3926e239738d36060909ffe6f511502f92149a45a1fade7fe031cb2d33e88b"
1225 checksum = "4a3926e239738d36060909ffe6f511502f92149a45a1fade7fe031cb2d33e88b"
1225
1226
1226 [[package]]
1227 [[package]]
1227 name = "semver"
1228 name = "semver"
1228 version = "1.0.14"
1229 version = "1.0.14"
1229 source = "registry+https://github.com/rust-lang/crates.io-index"
1230 source = "registry+https://github.com/rust-lang/crates.io-index"
1230 checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
1231 checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
1231
1232
1232 [[package]]
1233 [[package]]
1233 name = "serde"
1234 name = "serde"
1234 version = "1.0.152"
1235 version = "1.0.152"
1235 source = "registry+https://github.com/rust-lang/crates.io-index"
1236 source = "registry+https://github.com/rust-lang/crates.io-index"
1236 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
1237 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
1237 dependencies = [
1238 dependencies = [
1238 "serde_derive",
1239 "serde_derive",
1239 ]
1240 ]
1240
1241
1241 [[package]]
1242 [[package]]
1242 name = "serde_derive"
1243 name = "serde_derive"
1243 version = "1.0.152"
1244 version = "1.0.152"
1244 source = "registry+https://github.com/rust-lang/crates.io-index"
1245 source = "registry+https://github.com/rust-lang/crates.io-index"
1245 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
1246 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
1246 dependencies = [
1247 dependencies = [
1247 "proc-macro2",
1248 "proc-macro2",
1248 "quote",
1249 "quote",
1249 "syn",
1250 "syn",
1250 ]
1251 ]
1251
1252
1252 [[package]]
1253 [[package]]
1253 name = "serde_spanned"
1254 name = "serde_spanned"
1254 version = "0.6.1"
1255 version = "0.6.1"
1255 source = "registry+https://github.com/rust-lang/crates.io-index"
1256 source = "registry+https://github.com/rust-lang/crates.io-index"
1256 checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
1257 checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
1257 dependencies = [
1258 dependencies = [
1258 "serde",
1259 "serde",
1259 ]
1260 ]
1260
1261
1261 [[package]]
1262 [[package]]
1262 name = "sha-1"
1263 name = "sha-1"
1263 version = "0.9.8"
1264 version = "0.9.8"
1264 source = "registry+https://github.com/rust-lang/crates.io-index"
1265 source = "registry+https://github.com/rust-lang/crates.io-index"
1265 checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
1266 checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
1266 dependencies = [
1267 dependencies = [
1267 "block-buffer 0.9.0",
1268 "block-buffer 0.9.0",
1268 "cfg-if",
1269 "cfg-if",
1269 "cpufeatures",
1270 "cpufeatures",
1270 "digest 0.9.0",
1271 "digest 0.9.0",
1271 "opaque-debug",
1272 "opaque-debug",
1272 ]
1273 ]
1273
1274
1274 [[package]]
1275 [[package]]
1275 name = "sha-1"
1276 name = "sha-1"
1276 version = "0.10.0"
1277 version = "0.10.0"
1277 source = "registry+https://github.com/rust-lang/crates.io-index"
1278 source = "registry+https://github.com/rust-lang/crates.io-index"
1278 checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
1279 checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
1279 dependencies = [
1280 dependencies = [
1280 "cfg-if",
1281 "cfg-if",
1281 "cpufeatures",
1282 "cpufeatures",
1282 "digest 0.10.5",
1283 "digest 0.10.5",
1283 ]
1284 ]
1284
1285
1285 [[package]]
1286 [[package]]
1286 name = "shellexpand"
1287 name = "shellexpand"
1287 version = "3.1.0"
1288 version = "3.1.0"
1288 source = "registry+https://github.com/rust-lang/crates.io-index"
1289 source = "registry+https://github.com/rust-lang/crates.io-index"
1289 checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
1290 checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
1290 dependencies = [
1291 dependencies = [
1291 "bstr",
1292 "bstr",
1292 "dirs",
1293 "dirs",
1293 "os_str_bytes",
1294 "os_str_bytes",
1294 ]
1295 ]
1295
1296
1296 [[package]]
1297 [[package]]
1297 name = "sized-chunks"
1298 name = "sized-chunks"
1298 version = "0.6.5"
1299 version = "0.6.5"
1299 source = "registry+https://github.com/rust-lang/crates.io-index"
1300 source = "registry+https://github.com/rust-lang/crates.io-index"
1300 checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
1301 checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
1301 dependencies = [
1302 dependencies = [
1302 "bitmaps",
1303 "bitmaps",
1303 "typenum",
1304 "typenum",
1304 ]
1305 ]
1305
1306
1306 [[package]]
1307 [[package]]
1307 name = "stable_deref_trait"
1308 name = "stable_deref_trait"
1308 version = "1.2.0"
1309 version = "1.2.0"
1309 source = "registry+https://github.com/rust-lang/crates.io-index"
1310 source = "registry+https://github.com/rust-lang/crates.io-index"
1310 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
1311 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
1311
1312
1312 [[package]]
1313 [[package]]
1313 name = "static_assertions"
1314 name = "static_assertions"
1314 version = "1.1.0"
1315 version = "1.1.0"
1315 source = "registry+https://github.com/rust-lang/crates.io-index"
1316 source = "registry+https://github.com/rust-lang/crates.io-index"
1316 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
1317 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
1317
1318
1318 [[package]]
1319 [[package]]
1319 name = "strsim"
1320 name = "strsim"
1320 version = "0.10.0"
1321 version = "0.10.0"
1321 source = "registry+https://github.com/rust-lang/crates.io-index"
1322 source = "registry+https://github.com/rust-lang/crates.io-index"
1322 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
1323 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
1323
1324
1324 [[package]]
1325 [[package]]
1325 name = "syn"
1326 name = "syn"
1326 version = "1.0.109"
1327 version = "1.0.109"
1327 source = "registry+https://github.com/rust-lang/crates.io-index"
1328 source = "registry+https://github.com/rust-lang/crates.io-index"
1328 checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
1329 checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
1329 dependencies = [
1330 dependencies = [
1330 "proc-macro2",
1331 "proc-macro2",
1331 "quote",
1332 "quote",
1332 "unicode-ident",
1333 "unicode-ident",
1333 ]
1334 ]
1334
1335
1335 [[package]]
1336 [[package]]
1336 name = "tap"
1337 name = "tap"
1337 version = "1.0.1"
1338 version = "1.0.1"
1338 source = "registry+https://github.com/rust-lang/crates.io-index"
1339 source = "registry+https://github.com/rust-lang/crates.io-index"
1339 checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
1340 checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
1340
1341
1341 [[package]]
1342 [[package]]
1342 name = "tempfile"
1343 name = "tempfile"
1343 version = "3.3.0"
1344 version = "3.3.0"
1344 source = "registry+https://github.com/rust-lang/crates.io-index"
1345 source = "registry+https://github.com/rust-lang/crates.io-index"
1345 checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
1346 checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
1346 dependencies = [
1347 dependencies = [
1347 "cfg-if",
1348 "cfg-if",
1348 "fastrand",
1349 "fastrand",
1349 "libc",
1350 "libc",
1350 "redox_syscall",
1351 "redox_syscall",
1351 "remove_dir_all",
1352 "remove_dir_all",
1352 "winapi",
1353 "winapi",
1353 ]
1354 ]
1354
1355
1355 [[package]]
1356 [[package]]
1356 name = "termcolor"
1357 name = "termcolor"
1357 version = "1.1.3"
1358 version = "1.1.3"
1358 source = "registry+https://github.com/rust-lang/crates.io-index"
1359 source = "registry+https://github.com/rust-lang/crates.io-index"
1359 checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
1360 checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
1360 dependencies = [
1361 dependencies = [
1361 "winapi-util",
1362 "winapi-util",
1362 ]
1363 ]
1363
1364
1364 [[package]]
1365 [[package]]
1365 name = "thiserror"
1366 name = "thiserror"
1366 version = "1.0.39"
1367 version = "1.0.39"
1367 source = "registry+https://github.com/rust-lang/crates.io-index"
1368 source = "registry+https://github.com/rust-lang/crates.io-index"
1368 checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
1369 checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
1369 dependencies = [
1370 dependencies = [
1370 "thiserror-impl",
1371 "thiserror-impl",
1371 ]
1372 ]
1372
1373
1373 [[package]]
1374 [[package]]
1374 name = "thiserror-impl"
1375 name = "thiserror-impl"
1375 version = "1.0.39"
1376 version = "1.0.39"
1376 source = "registry+https://github.com/rust-lang/crates.io-index"
1377 source = "registry+https://github.com/rust-lang/crates.io-index"
1377 checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
1378 checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
1378 dependencies = [
1379 dependencies = [
1379 "proc-macro2",
1380 "proc-macro2",
1380 "quote",
1381 "quote",
1381 "syn",
1382 "syn",
1382 ]
1383 ]
1383
1384
1384 [[package]]
1385 [[package]]
1385 name = "thread_local"
1386 name = "thread_local"
1386 version = "1.1.4"
1387 version = "1.1.4"
1387 source = "registry+https://github.com/rust-lang/crates.io-index"
1388 source = "registry+https://github.com/rust-lang/crates.io-index"
1388 checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
1389 checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
1389 dependencies = [
1390 dependencies = [
1390 "once_cell",
1391 "once_cell",
1391 ]
1392 ]
1392
1393
1393 [[package]]
1394 [[package]]
1394 name = "toml"
1395 name = "toml"
1395 version = "0.6.0"
1396 version = "0.6.0"
1396 source = "registry+https://github.com/rust-lang/crates.io-index"
1397 source = "registry+https://github.com/rust-lang/crates.io-index"
1397 checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
1398 checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
1398 dependencies = [
1399 dependencies = [
1399 "serde",
1400 "serde",
1400 "serde_spanned",
1401 "serde_spanned",
1401 "toml_datetime",
1402 "toml_datetime",
1402 "toml_edit",
1403 "toml_edit",
1403 ]
1404 ]
1404
1405
1405 [[package]]
1406 [[package]]
1406 name = "toml_datetime"
1407 name = "toml_datetime"
1407 version = "0.5.1"
1408 version = "0.5.1"
1408 source = "registry+https://github.com/rust-lang/crates.io-index"
1409 source = "registry+https://github.com/rust-lang/crates.io-index"
1409 checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
1410 checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
1410 dependencies = [
1411 dependencies = [
1411 "serde",
1412 "serde",
1412 ]
1413 ]
1413
1414
1414 [[package]]
1415 [[package]]
1415 name = "toml_edit"
1416 name = "toml_edit"
1416 version = "0.18.1"
1417 version = "0.18.1"
1417 source = "registry+https://github.com/rust-lang/crates.io-index"
1418 source = "registry+https://github.com/rust-lang/crates.io-index"
1418 checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
1419 checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
1419 dependencies = [
1420 dependencies = [
1420 "indexmap",
1421 "indexmap",
1421 "nom8",
1422 "nom8",
1422 "serde",
1423 "serde",
1423 "serde_spanned",
1424 "serde_spanned",
1424 "toml_datetime",
1425 "toml_datetime",
1425 ]
1426 ]
1426
1427
1427 [[package]]
1428 [[package]]
1428 name = "twox-hash"
1429 name = "twox-hash"
1429 version = "1.6.3"
1430 version = "1.6.3"
1430 source = "registry+https://github.com/rust-lang/crates.io-index"
1431 source = "registry+https://github.com/rust-lang/crates.io-index"
1431 checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
1432 checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
1432 dependencies = [
1433 dependencies = [
1433 "cfg-if",
1434 "cfg-if",
1434 "rand 0.8.5",
1435 "rand 0.8.5",
1435 "static_assertions",
1436 "static_assertions",
1436 ]
1437 ]
1437
1438
1438 [[package]]
1439 [[package]]
1439 name = "typenum"
1440 name = "typenum"
1440 version = "1.15.0"
1441 version = "1.15.0"
1441 source = "registry+https://github.com/rust-lang/crates.io-index"
1442 source = "registry+https://github.com/rust-lang/crates.io-index"
1442 checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
1443 checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
1443
1444
1444 [[package]]
1445 [[package]]
1445 name = "unicode-ident"
1446 name = "unicode-ident"
1446 version = "1.0.5"
1447 version = "1.0.5"
1447 source = "registry+https://github.com/rust-lang/crates.io-index"
1448 source = "registry+https://github.com/rust-lang/crates.io-index"
1448 checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
1449 checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
1449
1450
1450 [[package]]
1451 [[package]]
1451 name = "unicode-width"
1452 name = "unicode-width"
1452 version = "0.1.10"
1453 version = "0.1.10"
1453 source = "registry+https://github.com/rust-lang/crates.io-index"
1454 source = "registry+https://github.com/rust-lang/crates.io-index"
1454 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
1455 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
1455
1456
1456 [[package]]
1457 [[package]]
1457 name = "vcpkg"
1458 name = "vcpkg"
1458 version = "0.2.15"
1459 version = "0.2.15"
1459 source = "registry+https://github.com/rust-lang/crates.io-index"
1460 source = "registry+https://github.com/rust-lang/crates.io-index"
1460 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
1461 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
1461
1462
1462 [[package]]
1463 [[package]]
1463 name = "vcsgraph"
1464 name = "vcsgraph"
1464 version = "0.2.0"
1465 version = "0.2.0"
1465 source = "registry+https://github.com/rust-lang/crates.io-index"
1466 source = "registry+https://github.com/rust-lang/crates.io-index"
1466 checksum = "4cb68c231e2575f7503a7c19213875f9d4ec2e84e963a56ce3de4b6bee351ef7"
1467 checksum = "4cb68c231e2575f7503a7c19213875f9d4ec2e84e963a56ce3de4b6bee351ef7"
1467 dependencies = [
1468 dependencies = [
1468 "hex",
1469 "hex",
1469 "rand 0.7.3",
1470 "rand 0.7.3",
1470 "sha-1 0.9.8",
1471 "sha-1 0.9.8",
1471 ]
1472 ]
1472
1473
1473 [[package]]
1474 [[package]]
1474 name = "version_check"
1475 name = "version_check"
1475 version = "0.9.4"
1476 version = "0.9.4"
1476 source = "registry+https://github.com/rust-lang/crates.io-index"
1477 source = "registry+https://github.com/rust-lang/crates.io-index"
1477 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
1478 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
1478
1479
1479 [[package]]
1480 [[package]]
1480 name = "wasi"
1481 name = "wasi"
1481 version = "0.9.0+wasi-snapshot-preview1"
1482 version = "0.9.0+wasi-snapshot-preview1"
1482 source = "registry+https://github.com/rust-lang/crates.io-index"
1483 source = "registry+https://github.com/rust-lang/crates.io-index"
1483 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1484 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1484
1485
1485 [[package]]
1486 [[package]]
1486 name = "wasi"
1487 name = "wasi"
1487 version = "0.11.0+wasi-snapshot-preview1"
1488 version = "0.11.0+wasi-snapshot-preview1"
1488 source = "registry+https://github.com/rust-lang/crates.io-index"
1489 source = "registry+https://github.com/rust-lang/crates.io-index"
1489 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
1490 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
1490
1491
1491 [[package]]
1492 [[package]]
1492 name = "wasm-bindgen"
1493 name = "wasm-bindgen"
1493 version = "0.2.83"
1494 version = "0.2.83"
1494 source = "registry+https://github.com/rust-lang/crates.io-index"
1495 source = "registry+https://github.com/rust-lang/crates.io-index"
1495 checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
1496 checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
1496 dependencies = [
1497 dependencies = [
1497 "cfg-if",
1498 "cfg-if",
1498 "wasm-bindgen-macro",
1499 "wasm-bindgen-macro",
1499 ]
1500 ]
1500
1501
1501 [[package]]
1502 [[package]]
1502 name = "wasm-bindgen-backend"
1503 name = "wasm-bindgen-backend"
1503 version = "0.2.83"
1504 version = "0.2.83"
1504 source = "registry+https://github.com/rust-lang/crates.io-index"
1505 source = "registry+https://github.com/rust-lang/crates.io-index"
1505 checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
1506 checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
1506 dependencies = [
1507 dependencies = [
1507 "bumpalo",
1508 "bumpalo",
1508 "log",
1509 "log",
1509 "once_cell",
1510 "once_cell",
1510 "proc-macro2",
1511 "proc-macro2",
1511 "quote",
1512 "quote",
1512 "syn",
1513 "syn",
1513 "wasm-bindgen-shared",
1514 "wasm-bindgen-shared",
1514 ]
1515 ]
1515
1516
1516 [[package]]
1517 [[package]]
1517 name = "wasm-bindgen-macro"
1518 name = "wasm-bindgen-macro"
1518 version = "0.2.83"
1519 version = "0.2.83"
1519 source = "registry+https://github.com/rust-lang/crates.io-index"
1520 source = "registry+https://github.com/rust-lang/crates.io-index"
1520 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
1521 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
1521 dependencies = [
1522 dependencies = [
1522 "quote",
1523 "quote",
1523 "wasm-bindgen-macro-support",
1524 "wasm-bindgen-macro-support",
1524 ]
1525 ]
1525
1526
1526 [[package]]
1527 [[package]]
1527 name = "wasm-bindgen-macro-support"
1528 name = "wasm-bindgen-macro-support"
1528 version = "0.2.83"
1529 version = "0.2.83"
1529 source = "registry+https://github.com/rust-lang/crates.io-index"
1530 source = "registry+https://github.com/rust-lang/crates.io-index"
1530 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
1531 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
1531 dependencies = [
1532 dependencies = [
1532 "proc-macro2",
1533 "proc-macro2",
1533 "quote",
1534 "quote",
1534 "syn",
1535 "syn",
1535 "wasm-bindgen-backend",
1536 "wasm-bindgen-backend",
1536 "wasm-bindgen-shared",
1537 "wasm-bindgen-shared",
1537 ]
1538 ]
1538
1539
1539 [[package]]
1540 [[package]]
1540 name = "wasm-bindgen-shared"
1541 name = "wasm-bindgen-shared"
1541 version = "0.2.83"
1542 version = "0.2.83"
1542 source = "registry+https://github.com/rust-lang/crates.io-index"
1543 source = "registry+https://github.com/rust-lang/crates.io-index"
1543 checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
1544 checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
1544
1545
1545 [[package]]
1546 [[package]]
1546 name = "web-sys"
1547 name = "web-sys"
1547 version = "0.3.60"
1548 version = "0.3.60"
1548 source = "registry+https://github.com/rust-lang/crates.io-index"
1549 source = "registry+https://github.com/rust-lang/crates.io-index"
1549 checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
1550 checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
1550 dependencies = [
1551 dependencies = [
1551 "js-sys",
1552 "js-sys",
1552 "wasm-bindgen",
1553 "wasm-bindgen",
1553 ]
1554 ]
1554
1555
1555 [[package]]
1556 [[package]]
1556 name = "which"
1557 name = "which"
1557 version = "4.3.0"
1558 version = "4.3.0"
1558 source = "registry+https://github.com/rust-lang/crates.io-index"
1559 source = "registry+https://github.com/rust-lang/crates.io-index"
1559 checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
1560 checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
1560 dependencies = [
1561 dependencies = [
1561 "either",
1562 "either",
1562 "libc",
1563 "libc",
1563 "once_cell",
1564 "once_cell",
1564 ]
1565 ]
1565
1566
1566 [[package]]
1567 [[package]]
1567 name = "whoami"
1568 name = "whoami"
1568 version = "1.4.0"
1569 version = "1.4.0"
1569 source = "registry+https://github.com/rust-lang/crates.io-index"
1570 source = "registry+https://github.com/rust-lang/crates.io-index"
1570 checksum = "2c70234412ca409cc04e864e89523cb0fc37f5e1344ebed5a3ebf4192b6b9f68"
1571 checksum = "2c70234412ca409cc04e864e89523cb0fc37f5e1344ebed5a3ebf4192b6b9f68"
1571 dependencies = [
1572 dependencies = [
1572 "wasm-bindgen",
1573 "wasm-bindgen",
1573 "web-sys",
1574 "web-sys",
1574 ]
1575 ]
1575
1576
1576 [[package]]
1577 [[package]]
1577 name = "winapi"
1578 name = "winapi"
1578 version = "0.3.9"
1579 version = "0.3.9"
1579 source = "registry+https://github.com/rust-lang/crates.io-index"
1580 source = "registry+https://github.com/rust-lang/crates.io-index"
1580 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1581 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1581 dependencies = [
1582 dependencies = [
1582 "winapi-i686-pc-windows-gnu",
1583 "winapi-i686-pc-windows-gnu",
1583 "winapi-x86_64-pc-windows-gnu",
1584 "winapi-x86_64-pc-windows-gnu",
1584 ]
1585 ]
1585
1586
1586 [[package]]
1587 [[package]]
1587 name = "winapi-i686-pc-windows-gnu"
1588 name = "winapi-i686-pc-windows-gnu"
1588 version = "0.4.0"
1589 version = "0.4.0"
1589 source = "registry+https://github.com/rust-lang/crates.io-index"
1590 source = "registry+https://github.com/rust-lang/crates.io-index"
1590 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1591 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1591
1592
1592 [[package]]
1593 [[package]]
1593 name = "winapi-util"
1594 name = "winapi-util"
1594 version = "0.1.5"
1595 version = "0.1.5"
1595 source = "registry+https://github.com/rust-lang/crates.io-index"
1596 source = "registry+https://github.com/rust-lang/crates.io-index"
1596 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1597 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1597 dependencies = [
1598 dependencies = [
1598 "winapi",
1599 "winapi",
1599 ]
1600 ]
1600
1601
1601 [[package]]
1602 [[package]]
1602 name = "winapi-x86_64-pc-windows-gnu"
1603 name = "winapi-x86_64-pc-windows-gnu"
1603 version = "0.4.0"
1604 version = "0.4.0"
1604 source = "registry+https://github.com/rust-lang/crates.io-index"
1605 source = "registry+https://github.com/rust-lang/crates.io-index"
1605 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1606 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1606
1607
1607 [[package]]
1608 [[package]]
1608 name = "windows-sys"
1609 name = "windows-sys"
1609 version = "0.48.0"
1610 version = "0.48.0"
1610 source = "registry+https://github.com/rust-lang/crates.io-index"
1611 source = "registry+https://github.com/rust-lang/crates.io-index"
1611 checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
1612 checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
1612 dependencies = [
1613 dependencies = [
1613 "windows-targets 0.48.5",
1614 "windows-targets 0.48.5",
1614 ]
1615 ]
1615
1616
1616 [[package]]
1617 [[package]]
1617 name = "windows-targets"
1618 name = "windows-targets"
1618 version = "0.48.5"
1619 version = "0.48.5"
1619 source = "registry+https://github.com/rust-lang/crates.io-index"
1620 source = "registry+https://github.com/rust-lang/crates.io-index"
1620 checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
1621 checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
1621 dependencies = [
1622 dependencies = [
1622 "windows_aarch64_gnullvm 0.48.5",
1623 "windows_aarch64_gnullvm 0.48.5",
1623 "windows_aarch64_msvc 0.48.5",
1624 "windows_aarch64_msvc 0.48.5",
1624 "windows_i686_gnu 0.48.5",
1625 "windows_i686_gnu 0.48.5",
1625 "windows_i686_msvc 0.48.5",
1626 "windows_i686_msvc 0.48.5",
1626 "windows_x86_64_gnu 0.48.5",
1627 "windows_x86_64_gnu 0.48.5",
1627 "windows_x86_64_gnullvm 0.48.5",
1628 "windows_x86_64_gnullvm 0.48.5",
1628 "windows_x86_64_msvc 0.48.5",
1629 "windows_x86_64_msvc 0.48.5",
1629 ]
1630 ]
1630
1631
1631 [[package]]
1632 [[package]]
1632 name = "windows-targets"
1633 name = "windows-targets"
1633 version = "0.52.0"
1634 version = "0.52.0"
1634 source = "registry+https://github.com/rust-lang/crates.io-index"
1635 source = "registry+https://github.com/rust-lang/crates.io-index"
1635 checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
1636 checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
1636 dependencies = [
1637 dependencies = [
1637 "windows_aarch64_gnullvm 0.52.0",
1638 "windows_aarch64_gnullvm 0.52.0",
1638 "windows_aarch64_msvc 0.52.0",
1639 "windows_aarch64_msvc 0.52.0",
1639 "windows_i686_gnu 0.52.0",
1640 "windows_i686_gnu 0.52.0",
1640 "windows_i686_msvc 0.52.0",
1641 "windows_i686_msvc 0.52.0",
1641 "windows_x86_64_gnu 0.52.0",
1642 "windows_x86_64_gnu 0.52.0",
1642 "windows_x86_64_gnullvm 0.52.0",
1643 "windows_x86_64_gnullvm 0.52.0",
1643 "windows_x86_64_msvc 0.52.0",
1644 "windows_x86_64_msvc 0.52.0",
1644 ]
1645 ]
1645
1646
1646 [[package]]
1647 [[package]]
1647 name = "windows_aarch64_gnullvm"
1648 name = "windows_aarch64_gnullvm"
1648 version = "0.48.5"
1649 version = "0.48.5"
1649 source = "registry+https://github.com/rust-lang/crates.io-index"
1650 source = "registry+https://github.com/rust-lang/crates.io-index"
1650 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
1651 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
1651
1652
1652 [[package]]
1653 [[package]]
1653 name = "windows_aarch64_gnullvm"
1654 name = "windows_aarch64_gnullvm"
1654 version = "0.52.0"
1655 version = "0.52.0"
1655 source = "registry+https://github.com/rust-lang/crates.io-index"
1656 source = "registry+https://github.com/rust-lang/crates.io-index"
1656 checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
1657 checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
1657
1658
1658 [[package]]
1659 [[package]]
1659 name = "windows_aarch64_msvc"
1660 name = "windows_aarch64_msvc"
1660 version = "0.48.5"
1661 version = "0.48.5"
1661 source = "registry+https://github.com/rust-lang/crates.io-index"
1662 source = "registry+https://github.com/rust-lang/crates.io-index"
1662 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
1663 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
1663
1664
1664 [[package]]
1665 [[package]]
1665 name = "windows_aarch64_msvc"
1666 name = "windows_aarch64_msvc"
1666 version = "0.52.0"
1667 version = "0.52.0"
1667 source = "registry+https://github.com/rust-lang/crates.io-index"
1668 source = "registry+https://github.com/rust-lang/crates.io-index"
1668 checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
1669 checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
1669
1670
1670 [[package]]
1671 [[package]]
1671 name = "windows_i686_gnu"
1672 name = "windows_i686_gnu"
1672 version = "0.48.5"
1673 version = "0.48.5"
1673 source = "registry+https://github.com/rust-lang/crates.io-index"
1674 source = "registry+https://github.com/rust-lang/crates.io-index"
1674 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
1675 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
1675
1676
1676 [[package]]
1677 [[package]]
1677 name = "windows_i686_gnu"
1678 name = "windows_i686_gnu"
1678 version = "0.52.0"
1679 version = "0.52.0"
1679 source = "registry+https://github.com/rust-lang/crates.io-index"
1680 source = "registry+https://github.com/rust-lang/crates.io-index"
1680 checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
1681 checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
1681
1682
1682 [[package]]
1683 [[package]]
1683 name = "windows_i686_msvc"
1684 name = "windows_i686_msvc"
1684 version = "0.48.5"
1685 version = "0.48.5"
1685 source = "registry+https://github.com/rust-lang/crates.io-index"
1686 source = "registry+https://github.com/rust-lang/crates.io-index"
1686 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
1687 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
1687
1688
1688 [[package]]
1689 [[package]]
1689 name = "windows_i686_msvc"
1690 name = "windows_i686_msvc"
1690 version = "0.52.0"
1691 version = "0.52.0"
1691 source = "registry+https://github.com/rust-lang/crates.io-index"
1692 source = "registry+https://github.com/rust-lang/crates.io-index"
1692 checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
1693 checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
1693
1694
1694 [[package]]
1695 [[package]]
1695 name = "windows_x86_64_gnu"
1696 name = "windows_x86_64_gnu"
1696 version = "0.48.5"
1697 version = "0.48.5"
1697 source = "registry+https://github.com/rust-lang/crates.io-index"
1698 source = "registry+https://github.com/rust-lang/crates.io-index"
1698 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
1699 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
1699
1700
1700 [[package]]
1701 [[package]]
1701 name = "windows_x86_64_gnu"
1702 name = "windows_x86_64_gnu"
1702 version = "0.52.0"
1703 version = "0.52.0"
1703 source = "registry+https://github.com/rust-lang/crates.io-index"
1704 source = "registry+https://github.com/rust-lang/crates.io-index"
1704 checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
1705 checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
1705
1706
1706 [[package]]
1707 [[package]]
1707 name = "windows_x86_64_gnullvm"
1708 name = "windows_x86_64_gnullvm"
1708 version = "0.48.5"
1709 version = "0.48.5"
1709 source = "registry+https://github.com/rust-lang/crates.io-index"
1710 source = "registry+https://github.com/rust-lang/crates.io-index"
1710 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
1711 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
1711
1712
1712 [[package]]
1713 [[package]]
1713 name = "windows_x86_64_gnullvm"
1714 name = "windows_x86_64_gnullvm"
1714 version = "0.52.0"
1715 version = "0.52.0"
1715 source = "registry+https://github.com/rust-lang/crates.io-index"
1716 source = "registry+https://github.com/rust-lang/crates.io-index"
1716 checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
1717 checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
1717
1718
1718 [[package]]
1719 [[package]]
1719 name = "windows_x86_64_msvc"
1720 name = "windows_x86_64_msvc"
1720 version = "0.48.5"
1721 version = "0.48.5"
1721 source = "registry+https://github.com/rust-lang/crates.io-index"
1722 source = "registry+https://github.com/rust-lang/crates.io-index"
1722 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
1723 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
1723
1724
1724 [[package]]
1725 [[package]]
1725 name = "windows_x86_64_msvc"
1726 name = "windows_x86_64_msvc"
1726 version = "0.52.0"
1727 version = "0.52.0"
1727 source = "registry+https://github.com/rust-lang/crates.io-index"
1728 source = "registry+https://github.com/rust-lang/crates.io-index"
1728 checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
1729 checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
1729
1730
1730 [[package]]
1731 [[package]]
1731 name = "wyz"
1732 name = "wyz"
1732 version = "0.5.1"
1733 version = "0.5.1"
1733 source = "registry+https://github.com/rust-lang/crates.io-index"
1734 source = "registry+https://github.com/rust-lang/crates.io-index"
1734 checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
1735 checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
1735 dependencies = [
1736 dependencies = [
1736 "tap",
1737 "tap",
1737 ]
1738 ]
1738
1739
1739 [[package]]
1740 [[package]]
1740 name = "yansi"
1741 name = "yansi"
1741 version = "0.5.1"
1742 version = "0.5.1"
1742 source = "registry+https://github.com/rust-lang/crates.io-index"
1743 source = "registry+https://github.com/rust-lang/crates.io-index"
1743 checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
1744 checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
1744
1745
1745 [[package]]
1746 [[package]]
1746 name = "zstd"
1747 name = "zstd"
1747 version = "0.12.3+zstd.1.5.2"
1748 version = "0.12.3+zstd.1.5.2"
1748 source = "registry+https://github.com/rust-lang/crates.io-index"
1749 source = "registry+https://github.com/rust-lang/crates.io-index"
1749 checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
1750 checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
1750 dependencies = [
1751 dependencies = [
1751 "zstd-safe",
1752 "zstd-safe",
1752 ]
1753 ]
1753
1754
1754 [[package]]
1755 [[package]]
1755 name = "zstd-safe"
1756 name = "zstd-safe"
1756 version = "6.0.4+zstd.1.5.4"
1757 version = "6.0.4+zstd.1.5.4"
1757 source = "registry+https://github.com/rust-lang/crates.io-index"
1758 source = "registry+https://github.com/rust-lang/crates.io-index"
1758 checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
1759 checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
1759 dependencies = [
1760 dependencies = [
1760 "libc",
1761 "libc",
1761 "zstd-sys",
1762 "zstd-sys",
1762 ]
1763 ]
1763
1764
1764 [[package]]
1765 [[package]]
1765 name = "zstd-sys"
1766 name = "zstd-sys"
1766 version = "2.0.7+zstd.1.5.4"
1767 version = "2.0.7+zstd.1.5.4"
1767 source = "registry+https://github.com/rust-lang/crates.io-index"
1768 source = "registry+https://github.com/rust-lang/crates.io-index"
1768 checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
1769 checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
1769 dependencies = [
1770 dependencies = [
1770 "cc",
1771 "cc",
1771 "libc",
1772 "libc",
1772 "pkg-config",
1773 "pkg-config",
1773 ]
1774 ]
@@ -1,26 +1,27
[package]
name = "rhg"
version = "0.1.0"
authors = [
    "Antoine Cezar <antoine.cezar@octobus.net>",
    "Raphaël Gomès <raphael.gomes@octobus.net>",
]
edition = "2021"

[dependencies]
atty = "0.2.14"
hg-core = { path = "../hg-core"}
chrono = "0.4.23"
clap = { version = "4.0.24", features = ["cargo"] }
derive_more = "0.99.17"
home = "0.5.4"
lazy_static = "1.4.0"
log = "0.4.17"
logging_timer = "1.1.0"
regex = "1.7.0"
env_logger = "0.9.3"
format-bytes = "0.3.0"
shellexpand = { version = "3.1", features = ["full"]}
whoami = "1.4"
which = "4.3.0"
rayon = "1.7.0"
+libc = "0.2.155"
@@ -1,821 +1,825
// status.rs
//
// Copyright 2020, Georges Racinet <georges.racinets@octobus.net>
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.

use crate::error::CommandError;
use crate::ui::{
    format_pattern_file_warning, print_narrow_sparse_warnings, relative_paths,
    RelativePaths, Ui,
};
use crate::utils::path_utils::RelativizePaths;
use clap::Arg;
use format_bytes::format_bytes;
use hg::config::Config;
use hg::dirstate::has_exec_bit;
use hg::dirstate::status::StatusPath;
use hg::dirstate::TruncatedTimestamp;
use hg::errors::{HgError, IoResultExt};
use hg::filepatterns::parse_pattern_args;
use hg::lock::LockError;
use hg::manifest::Manifest;
use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
use hg::repo::Repo;
use hg::utils::debug::debug_wait_for_file;
use hg::utils::files::{
    get_bytes_from_os_str, get_bytes_from_os_string, get_path_from_bytes,
};
use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
use hg::PatternFileWarning;
use hg::Revision;
use hg::StatusError;
use hg::StatusOptions;
use hg::{self, narrow, sparse};
use hg::{DirstateStatus, RevlogOpenOptions};
use log::info;
use rayon::prelude::*;
use std::borrow::Cow;
use std::io;
use std::mem::take;
use std::path::PathBuf;

pub const HELP_TEXT: &str = "
Show changed files in the working directory

This is a pure Rust version of `hg status`.

Some options might be missing, check the list below.
";

pub fn args() -> clap::Command {
    clap::command!("status")
        .alias("st")
        .about(HELP_TEXT)
        .arg(
            Arg::new("file")
                .value_parser(clap::value_parser!(std::ffi::OsString))
                .help("show only these files")
                .action(clap::ArgAction::Append),
        )
        .arg(
            Arg::new("all")
                .help("show status of all files")
                .short('A')
                .action(clap::ArgAction::SetTrue)
                .long("all"),
        )
        .arg(
            Arg::new("modified")
                .help("show only modified files")
                .short('m')
                .action(clap::ArgAction::SetTrue)
                .long("modified"),
        )
        .arg(
            Arg::new("added")
                .help("show only added files")
                .short('a')
                .action(clap::ArgAction::SetTrue)
                .long("added"),
        )
        .arg(
            Arg::new("removed")
                .help("show only removed files")
                .short('r')
                .action(clap::ArgAction::SetTrue)
                .long("removed"),
        )
        .arg(
            Arg::new("clean")
                .help("show only clean files")
                .short('c')
                .action(clap::ArgAction::SetTrue)
                .long("clean"),
        )
        .arg(
            Arg::new("deleted")
                .help("show only deleted files")
                .short('d')
                .action(clap::ArgAction::SetTrue)
                .long("deleted"),
        )
        .arg(
            Arg::new("unknown")
                .help("show only unknown (not tracked) files")
                .short('u')
                .action(clap::ArgAction::SetTrue)
                .long("unknown"),
        )
        .arg(
            Arg::new("ignored")
                .help("show only ignored files")
                .short('i')
                .action(clap::ArgAction::SetTrue)
                .long("ignored"),
        )
        .arg(
            Arg::new("copies")
                .help("show source of copied files (DEFAULT: ui.statuscopies)")
                .short('C')
                .action(clap::ArgAction::SetTrue)
                .long("copies"),
        )
        .arg(
            Arg::new("print0")
                .help("end filenames with NUL, for use with xargs")
                .short('0')
                .action(clap::ArgAction::SetTrue)
                .long("print0"),
        )
        .arg(
            Arg::new("no-status")
                .help("hide status prefix")
                .short('n')
                .action(clap::ArgAction::SetTrue)
                .long("no-status"),
        )
        .arg(
            Arg::new("verbose")
                .help("enable additional output")
                .short('v')
                .action(clap::ArgAction::SetTrue)
                .long("verbose"),
        )
        .arg(
            Arg::new("rev")
                .help("show difference from/to revision")
                .long("rev")
                .num_args(1)
                .action(clap::ArgAction::Append)
                .value_name("REV"),
        )
}
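
// A hypothetical test sketch (not part of this changeset) showing how the
// clap command built by `args()` above parses its boolean flags; clap 4's
// `try_get_matches_from` and `get_flag` are the real parsing entry points.
#[cfg(test)]
mod args_sketch {
    #[test]
    fn status_flags_parse() {
        let matches = super::args()
            .try_get_matches_from(["status", "-m", "-a"])
            .expect("flags declared above should parse");
        assert!(matches.get_flag("modified"));
        assert!(matches.get_flag("added"));
        assert!(!matches.get_flag("removed"));
    }
}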

fn parse_revpair(
    repo: &Repo,
    revs: Option<Vec<String>>,
) -> Result<Option<(Revision, Revision)>, CommandError> {
    let revs = match revs {
        None => return Ok(None),
        Some(revs) => revs,
    };
    if revs.is_empty() {
        return Ok(None);
    }
    if revs.len() != 2 {
        return Err(CommandError::unsupported("expected 0 or 2 --rev flags"));
    }

    let rev1 = &revs[0];
    let rev2 = &revs[1];
    let rev1 = hg::revset::resolve_single(rev1, repo)
        .map_err(|e| (e, rev1.as_str()))?;
    let rev2 = hg::revset::resolve_single(rev2, repo)
        .map_err(|e| (e, rev2.as_str()))?;
    Ok(Some((rev1, rev2)))
}
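
// For example, `rhg status --rev 0 --rev 1` resolves both operands through
// `resolve_single` above and then reports the changes between those two
// revisions instead of comparing the working directory to its parent.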

/// Pure data type allowing the caller to specify file states to display
#[derive(Copy, Clone, Debug)]
pub struct DisplayStates {
    pub modified: bool,
    pub added: bool,
    pub removed: bool,
    pub clean: bool,
    pub deleted: bool,
    pub unknown: bool,
    pub ignored: bool,
}

pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates {
    modified: true,
    added: true,
    removed: true,
    clean: false,
    deleted: true,
    unknown: true,
    ignored: false,
};

pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates {
    modified: true,
    added: true,
    removed: true,
    clean: true,
    deleted: true,
    unknown: true,
    ignored: true,
};

impl DisplayStates {
    pub fn is_empty(&self) -> bool {
        !(self.modified
            || self.added
            || self.removed
            || self.clean
            || self.deleted
            || self.unknown
            || self.ignored)
    }
}
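
// A small illustrative test (hypothetical, not from this changeset): the
// default selection enables modified/added/removed/deleted/unknown, so
// `is_empty` only returns true when every flag was left false, which is
// what lets `run` below fall back to DEFAULT_DISPLAY_STATES.
#[cfg(test)]
mod display_states_sketch {
    use super::{ALL_DISPLAY_STATES, DEFAULT_DISPLAY_STATES};

    #[test]
    fn defaults_are_not_empty() {
        assert!(!DEFAULT_DISPLAY_STATES.is_empty());
        assert!(!ALL_DISPLAY_STATES.is_empty());
    }
}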

fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
    Ok(repo.dirstate_parents()?.is_merge())
}

fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
    // These are all the known values for the [fname] argument of
    // [addunfinished] function in [state.py]
    let known_state_files: &[&str] = &[
        "bisect.state",
        "graftstate",
        "histedit-state",
        "rebasestate",
        "shelvedstate",
        "transplant/journal",
        "updatestate",
    ];
    if has_unfinished_merge(repo)? {
        return Ok(true);
    };
    for f in known_state_files {
        if repo.hg_vfs().join(f).exists() {
            return Ok(true);
        }
    }
    Ok(false)
}
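
// For instance, an interrupted `hg rebase` leaves `.hg/rebasestate` behind,
// so `has_unfinished_state` keeps returning true until the rebase is
// continued or aborted.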

pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
    // TODO: lift these limitations
    if invocation
        .config
        .get(b"commands", b"status.terse")
        .is_some()
    {
        return Err(CommandError::unsupported(
            "status.terse is not yet supported with rhg status",
        ));
    }

    let ui = invocation.ui;
    let config = invocation.config;
    let args = invocation.subcommand_args;

    let revs = args.get_many::<String>("rev");
    let print0 = args.get_flag("print0");
    let verbose = args.get_flag("verbose")
        || config.get_bool(b"ui", b"verbose")?
        || config.get_bool(b"commands", b"status.verbose")?;
    let verbose = verbose && !print0;

    let all = args.get_flag("all");
    let display_states = if all {
        // TODO when implementing `--quiet`: it excludes clean files
        // from `--all`
        ALL_DISPLAY_STATES
    } else {
        let requested = DisplayStates {
            modified: args.get_flag("modified"),
            added: args.get_flag("added"),
            removed: args.get_flag("removed"),
            clean: args.get_flag("clean"),
            deleted: args.get_flag("deleted"),
            unknown: args.get_flag("unknown"),
            ignored: args.get_flag("ignored"),
        };
        if requested.is_empty() {
            DEFAULT_DISPLAY_STATES
        } else {
            requested
        }
    };
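    // Added note (illustrative): requesting e.g. only `--modified` and
    // `--unknown` sets just those two fields; a bare `rhg status` takes the
    // `requested.is_empty()` branch and falls back to DEFAULT_DISPLAY_STATES.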
    let no_status = args.get_flag("no-status");
    let list_copies = all
        || args.get_flag("copies")
        || config.get_bool(b"ui", b"statuscopies")?;

    let repo = invocation.repo?;
    let revpair = parse_revpair(repo, revs.map(|i| i.cloned().collect()))?;

    if verbose && has_unfinished_state(repo)? {
        return Err(CommandError::unsupported(
            "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
        ));
    }

    let mut dmap = repo.dirstate_map_mut()?;

    let check_exec = hg::checkexec::check_exec(repo.working_directory_path());

    let options = StatusOptions {
        check_exec,
        list_clean: display_states.clean,
        list_unknown: display_states.unknown,
        list_ignored: display_states.ignored,
        list_copies,
        collect_traversed_dirs: false,
    };

    type StatusResult<'a> =
        Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;

    let relative_status = config
        .get_option(b"commands", b"status.relative")?
        .expect("commands.status.relative should have a default value");

    let relativize_paths = relative_status || {
        // See the `getuipathfn` usage in the Python code (`commands.py`).
        let legacy_relative_behavior = args.contains_id("file");
        match relative_paths(invocation.config)? {
            RelativePaths::Legacy => legacy_relative_behavior,
            RelativePaths::Bool(v) => v,
        }
    };
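    // Example (added note, not in the original): with
    // `commands.status.relative=true`, or when explicit file arguments are
    // given under the legacy behavior, paths are printed relative to the
    // current directory instead of the repository root.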

    let mut output = DisplayStatusPaths {
        ui,
        no_status,
        relativize: if relativize_paths {
            Some(RelativizePaths::new(repo)?)
        } else {
            None
        },
        print0,
    };

    let after_status = |res: StatusResult| -> Result<_, CommandError> {
        let (mut ds_status, pattern_warnings) = res?;
        for warning in pattern_warnings {
            ui.write_stderr(&format_pattern_file_warning(&warning, repo))?;
        }

        for (path, error) in take(&mut ds_status.bad) {
            let error = match error {
                hg::BadMatch::OsError(code) => {
                    std::io::Error::from_raw_os_error(code).to_string()
                }
                hg::BadMatch::BadType(ty) => {
                    format!("unsupported file type (type is {})", ty)
                }
            };
            ui.write_stderr(&format_bytes!(
                b"{}: {}\n",
                path.as_bytes(),
                error.as_bytes()
            ))?
        }
        if !ds_status.unsure.is_empty() {
            info!(
                "Files to be rechecked by retrieval from filelog: {:?}",
                ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>()
            );
        }
        let mut fixup = Vec::new();
        if !ds_status.unsure.is_empty()
            && (display_states.modified || display_states.clean)
        {
            let p1 = repo.dirstate_parents()?.p1;
            let manifest = repo.manifest_for_node(p1).map_err(|e| {
                CommandError::from((e, &*format!("{:x}", p1.short())))
            })?;
            let working_directory_vfs = repo.working_directory_vfs();
            let store_vfs = repo.store_vfs();
            let revlog_open_options = repo.default_revlog_options(false)?;
            let res: Vec<_> = take(&mut ds_status.unsure)
                .into_par_iter()
                .map(|to_check| {
                    // The compiler seems to get a bit confused with complex
                    // inference when using a parallel iterator + map
                    // + map_err + collect, so let's just inline some of the
                    // logic.
                    match unsure_is_modified(
                        working_directory_vfs,
                        store_vfs,
                        check_exec,
                        &manifest,
                        &to_check.path,
                        revlog_open_options,
                    ) {
                        Err(HgError::IoError { .. }) => {
                            // IO errors most likely stem from the file being
                            // deleted even though we know it's in the
                            // dirstate.
                            Ok((to_check, UnsureOutcome::Deleted))
                        }
                        Ok(outcome) => Ok((to_check, outcome)),
                        Err(e) => Err(e),
                    }
                })
                .collect::<Result<_, _>>()?;
            for (status_path, outcome) in res.into_iter() {
                match outcome {
                    UnsureOutcome::Clean => {
                        if display_states.clean {
                            ds_status.clean.push(status_path.clone());
                        }
                        fixup.push(status_path.path.into_owned())
                    }
                    UnsureOutcome::Modified => {
                        if display_states.modified {
                            ds_status.modified.push(status_path);
                        }
                    }
                    UnsureOutcome::Deleted => {
                        if display_states.deleted {
                            ds_status.deleted.push(status_path);
                        }
                    }
                }
            }
        }

        let dirstate_write_needed = ds_status.dirty;
        let filesystem_time_at_status_start =
            ds_status.filesystem_time_at_status_start;

        output.output(display_states, ds_status)?;

        Ok((
            fixup,
            dirstate_write_needed,
            filesystem_time_at_status_start,
        ))
    };
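    // Note (added for clarity): `fixup` collects paths that turned out to be
    // clean after the content comparison; their dirstate entries are
    // refreshed further down, under the write lock, so the next `status`
    // run does not have to re-read their contents.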
    let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;

    if let Some((rev1, rev2)) = revpair {
        let mut ds_status = DirstateStatus::default();
        if list_copies {
            return Err(CommandError::unsupported(
                "status --rev --rev with copy information is not implemented yet",
            ));
        }

        let stat = hg::operations::status_rev_rev_no_copies(
            repo,
            rev1,
            rev2,
            narrow_matcher,
        )?;
        for entry in stat.iter() {
            let (path, status) = entry?;
            let path = StatusPath {
                path: Cow::Borrowed(path),
                copy_source: None,
            };
            match status {
                hg::operations::DiffStatus::Removed => {
                    if display_states.removed {
                        ds_status.removed.push(path)
                    }
                }
                hg::operations::DiffStatus::Added => {
                    if display_states.added {
                        ds_status.added.push(path)
                    }
                }
                hg::operations::DiffStatus::Modified => {
                    if display_states.modified {
                        ds_status.modified.push(path)
                    }
                }
                hg::operations::DiffStatus::Matching => {
                    if display_states.clean {
                        ds_status.clean.push(path)
                    }
                }
            }
        }
        output.output(display_states, ds_status)?;
        return Ok(());
    }

    let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
    let matcher = match (repo.has_narrow(), repo.has_sparse()) {
        (true, true) => {
            Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
        }
        (true, false) => narrow_matcher,
        (false, true) => sparse_matcher,
        (false, false) => Box::new(AlwaysMatcher),
    };
    let matcher = match args.get_many::<std::ffi::OsString>("file") {
        None => matcher,
        Some(files) => {
            let patterns: Vec<Vec<u8>> = files
                .filter(|s| !s.is_empty())
                .map(get_bytes_from_os_str)
                .collect();
            for file in &patterns {
                if file.starts_with(b"set:") {
                    return Err(CommandError::unsupported("fileset"));
                }
            }
            let cwd = hg::utils::current_dir()?;
            let root = repo.working_directory_path();
            let ignore_patterns = parse_pattern_args(patterns, &cwd, root)?;
            let files_matcher =
                hg::matchers::PatternMatcher::new(ignore_patterns)?;
            Box::new(IntersectionMatcher::new(
                Box::new(files_matcher),
                matcher,
            ))
        }
    };
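    // Illustrative composition (not in the original source): in a repo that
    // is both narrow and sparse, `rhg status src/` ends up with
    // IntersectionMatcher(PatternMatcher([src/]),
    //                     IntersectionMatcher(narrow, sparse)).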

    print_narrow_sparse_warnings(
        &narrow_warnings,
        &sparse_warnings,
        ui,
        repo,
    )?;
    let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
        dmap.with_status(
            matcher.as_ref(),
            repo.working_directory_path().to_owned(),
            ignore_files(repo, config),
            options,
            after_status,
        )?;

    // Development config option to test write races
    if let Err(e) =
        debug_wait_for_file(config, "status.pre-dirstate-write-file")
    {
        ui.write_stderr(e.as_bytes()).ok();
    }

    if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
        && !dirstate_write_needed
    {
        // Nothing to update
        return Ok(());
    }

    // Update the dirstate on disk if we can
    let with_lock_result =
        repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
            if let Some(mtime_boundary) = filesystem_time_at_status_start {
                for hg_path in fixup {
                    use std::os::unix::fs::MetadataExt;
                    let fs_path = hg_path_to_path_buf(&hg_path)
                        .expect("HgPath conversion");
                    // Specifically do not reuse `fs_metadata` from
                    // `unsure_is_modified`, which was needed before reading
                    // contents. Here we access metadata again after reading
                    // content, in case it changed in the meantime.
                    let metadata_res = repo
                        .working_directory_vfs()
                        .symlink_metadata(&fs_path);
                    let fs_metadata = match metadata_res {
                        Ok(meta) => meta,
                        Err(err) => match err {
                            HgError::IoError { .. } => {
                                // The file has probably been deleted. In any
                                // case, it was in the dirstate before, so
                                // let's ignore the error.
                                continue;
                            }
                            _ => return Err(err.into()),
                        },
                    };
                    if let Some(mtime) =
                        TruncatedTimestamp::for_reliable_mtime_of(
                            &fs_metadata,
                            &mtime_boundary,
                        )
                        .when_reading_file(&fs_path)?
                    {
                        let mode = fs_metadata.mode();
                        let size = fs_metadata.len();
                        dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
                        dirstate_write_needed = true
                    }
                }
            }
            drop(dmap); // Avoid "already mutably borrowed" RefCell panics
            if dirstate_write_needed {
                repo.write_dirstate()?
            }
            Ok(())
        });
    match with_lock_result {
        Ok(closure_result) => closure_result?,
        Err(LockError::AlreadyHeld) => {
            // Not updating the dirstate is not ideal but not critical:
            // don’t keep our caller waiting until some other Mercurial
            // process releases the lock.
            log::info!("not writing dirstate from `status`: lock is held")
        }
        Err(LockError::Other(HgError::IoError { error, .. }))
            if error.kind() == io::ErrorKind::PermissionDenied
                || match error.raw_os_error() {
                    None => false,
                    Some(errno) => libc::EROFS == errno,
                } =>
        {
            // `hg status` on a read-only repository is fine
        }
        Err(LockError::Other(error)) => {
            // Report other I/O errors
            Err(error)?
        }
    }
    Ok(())
}

fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> {
    let mut ignore_files = Vec::new();
    let repo_ignore = repo.working_directory_vfs().join(".hgignore");
    if repo_ignore.exists() {
        ignore_files.push(repo_ignore)
    }
    for (key, value) in config.iter_section(b"ui") {
        if key == b"ignore" || key.starts_with(b"ignore.") {
            let path = get_path_from_bytes(value);
            let path = shellexpand::path::full_with_context_no_errors(
                path,
                home::home_dir,
                |s| std::env::var(s).ok(),
            );
            let joined = repo.working_directory_path().join(path);
            ignore_files.push(joined);
        }
    }
    ignore_files
}
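
// Illustrative configuration picked up by `ignore_files` (not part of the
// original file):
//
//     [ui]
//     ignore = ~/.hgignore-global
//     ignore.work = $WORK_CONFIG/hgignore
//
// Values are tilde- and environment-expanded, then joined to the working
// directory when relative.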

struct DisplayStatusPaths<'a> {
    ui: &'a Ui,
    no_status: bool,
    relativize: Option<RelativizePaths>,
    print0: bool,
}

impl DisplayStatusPaths<'_> {
    // Probably more elegant to use a Deref or Borrow trait rather than
    // hardcode HgPathBuf, but probably not really useful at this point
    fn display(
        &self,
        status_prefix: &[u8],
        label: &'static str,
        mut paths: Vec<StatusPath<'_>>,
    ) -> Result<(), CommandError> {
        paths.sort_unstable();
        // TODO: get the stdout lock once for the whole loop
        // instead of in each write
        for StatusPath { path, copy_source } in paths {
            let relative_path;
            let relative_source;
            let (path, copy_source) = if let Some(relativize) =
                &self.relativize
            {
                relative_path = relativize.relativize(&path);
                relative_source =
                    copy_source.as_ref().map(|s| relativize.relativize(s));
                (&*relative_path, relative_source.as_deref())
            } else {
                (path.as_bytes(), copy_source.as_ref().map(|s| s.as_bytes()))
            };
            // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
            // in order to stream to stdout instead of allocating an
            // intermediate `Vec<u8>`.
            if !self.no_status {
                self.ui.write_stdout_labelled(status_prefix, label)?
            }
            let linebreak = if self.print0 { b"\x00" } else { b"\n" };
            self.ui.write_stdout_labelled(
                &format_bytes!(b"{}{}", path, linebreak),
                label,
            )?;
            if let Some(source) = copy_source.filter(|_| !self.no_status) {
                let label = "status.copied";
                self.ui.write_stdout_labelled(
                    &format_bytes!(b" {}{}", source, linebreak),
                    label,
                )?
            }
        }
        Ok(())
    }

    fn output(
        &mut self,
        display_states: DisplayStates,
        ds_status: DirstateStatus,
    ) -> Result<(), CommandError> {
        if display_states.modified {
            self.display(b"M ", "status.modified", ds_status.modified)?;
        }
        if display_states.added {
            self.display(b"A ", "status.added", ds_status.added)?;
        }
        if display_states.removed {
            self.display(b"R ", "status.removed", ds_status.removed)?;
        }
        if display_states.deleted {
            self.display(b"! ", "status.deleted", ds_status.deleted)?;
        }
        if display_states.unknown {
            self.display(b"? ", "status.unknown", ds_status.unknown)?;
        }
        if display_states.ignored {
            self.display(b"I ", "status.ignored", ds_status.ignored)?;
        }
        if display_states.clean {
            self.display(b"C ", "status.clean", ds_status.clean)?;
        }
        Ok(())
    }
}
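
// Example output shape (added note, not from the original file), for
// `no_status: false, print0: false`:
//
//     M src/commands/status.rs
//     ? notes.txt
//
// With `--print0` each trailing newline becomes a NUL byte, which pairs
// with `xargs -0`; with `--no-status` the two-character prefix is omitted.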

/// Outcome of the additional check for an ambiguous tracked file
enum UnsureOutcome {
    /// The file is actually clean
    Clean,
    /// The file has been modified
    Modified,
    /// The file was deleted on disk (or became another type of fs entry)
    Deleted,
}

/// Check if a file is modified by comparing actual repo store and file system.
///
/// This is meant to be used for files that the dirstate cannot resolve, due
/// to time resolution limits.
fn unsure_is_modified(
    working_directory_vfs: hg::vfs::Vfs,
    store_vfs: hg::vfs::Vfs,
    check_exec: bool,
    manifest: &Manifest,
    hg_path: &HgPath,
    revlog_open_options: RevlogOpenOptions,
) -> Result<UnsureOutcome, HgError> {
    let vfs = working_directory_vfs;
    let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
    let fs_metadata = vfs.symlink_metadata(&fs_path)?;
    let is_symlink = fs_metadata.file_type().is_symlink();

    let entry = manifest
        .find_by_path(hg_path)?
        .expect("ambiguous file not in p1");

    // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
    // dirstate
    let fs_flags = if is_symlink {
        Some(b'l')
    } else if check_exec && has_exec_bit(&fs_metadata) {
        Some(b'x')
    } else {
        None
    };

    let entry_flags = if check_exec {
        entry.flags
    } else if entry.flags == Some(b'x') {
        None
    } else {
        entry.flags
    };

    if entry_flags != fs_flags {
        return Ok(UnsureOutcome::Modified);
    }
    let filelog = hg::filelog::Filelog::open_vfs(
        &store_vfs,
        hg_path,
        revlog_open_options,
    )?;
    let fs_len = fs_metadata.len();
    let file_node = entry.node_id()?;
    let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
        HgError::corrupted(format!(
            "filelog {:?} missing node {:?} from manifest",
            hg_path, file_node
        ))
    })?;
    if filelog_entry.file_data_len_not_equal_to(fs_len) {
        // No need to read file contents:
        // it cannot be equal if it has a different length.
        return Ok(UnsureOutcome::Modified);
    }

    let p1_filelog_data = filelog_entry.data()?;
    let p1_contents = p1_filelog_data.file_data()?;
    if p1_contents.len() as u64 != fs_len {
        // No need to read file contents:
        // it cannot be equal if it has a different length.
        return Ok(UnsureOutcome::Modified);
    }

    let fs_contents = if is_symlink {
        get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
    } else {
        vfs.read(fs_path)?
    };

    Ok(if p1_contents != &*fs_contents {
        UnsureOutcome::Modified
    } else {
        UnsureOutcome::Clean
    })
}
@@ -1,1840 +1,1844
#
# This is the mercurial setup script.
#
# 'python setup.py install', or
# 'python setup.py --help' for more options
import os

# Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
# in bytestrings.
supportedpy = ','.join(
    [
        '>=3.6.2',
    ]
)

import sys, platform
import sysconfig


def sysstr(s):
    return s.decode('latin-1')


def eprint(*args, **kwargs):
    kwargs['file'] = sys.stderr
    print(*args, **kwargs)


import ssl

# ssl.HAS_TLSv1* are preferred to check support but they were added in Python
# 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
# (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
# were defined only if compiled against an OpenSSL version with TLS 1.1 / 1.2
# support. At the mentioned commit, they were unconditionally defined.
_notset = object()
has_tlsv1_1 = getattr(ssl, 'HAS_TLSv1_1', _notset)
if has_tlsv1_1 is _notset:
    has_tlsv1_1 = getattr(ssl, 'PROTOCOL_TLSv1_1', _notset) is not _notset
has_tlsv1_2 = getattr(ssl, 'HAS_TLSv1_2', _notset)
if has_tlsv1_2 is _notset:
    has_tlsv1_2 = getattr(ssl, 'PROTOCOL_TLSv1_2', _notset) is not _notset
if not (has_tlsv1_1 or has_tlsv1_2):
    error = """
The `ssl` module does not advertise support for TLS 1.1 or TLS 1.2.
Please make sure that your Python installation was compiled against an OpenSSL
version enabling these features (likely this requires the OpenSSL version to
be at least 1.0.1).
"""
    print(error, file=sys.stderr)
    sys.exit(1)

DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']

# Solaris Python packaging brain damage
try:
    import hashlib

    sha = hashlib.sha1()
except ImportError:
    try:
        import sha

        sha.sha  # silence unused import warning
    except ImportError:
        raise SystemExit(
            "Couldn't import standard hashlib (incomplete Python install)."
        )

try:
    import zlib

    zlib.compressobj  # silence unused import warning
except ImportError:
    raise SystemExit(
        "Couldn't import standard zlib (incomplete Python install)."
    )

# The base IronPython distribution (as of 2.7.1) doesn't support bz2
isironpython = False
try:
    isironpython = (
        platform.python_implementation().lower().find("ironpython") != -1
    )
except AttributeError:
    pass

if isironpython:
    sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
else:
    try:
        import bz2

        bz2.BZ2Compressor  # silence unused import warning
    except ImportError:
        raise SystemExit(
            "Couldn't import standard bz2 (incomplete Python install)."
        )

ispypy = "PyPy" in sys.version

import ctypes
import stat, subprocess, time
import re
import shutil
import tempfile

# We have issues with setuptools on some platforms and builders. Until
# those are resolved, setuptools is opt-in except for platforms where
# we don't have issues.
issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
if issetuptools:
    from setuptools import setup
else:
    try:
        from distutils.core import setup
    except ModuleNotFoundError:
        from setuptools import setup
from distutils.ccompiler import new_compiler
from distutils.core import Command, Extension
from distutils.dist import Distribution
from distutils.command.build import build
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.install import install
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils import log
from distutils.spawn import spawn, find_executable
from distutils import file_util
from distutils.errors import (
    CCompilerError,
    DistutilsError,
    DistutilsExecError,
)
from distutils.sysconfig import get_python_inc


def write_if_changed(path, content):
    """Write content to a file iff the content hasn't changed."""
    if os.path.exists(path):
        with open(path, 'rb') as fh:
            current = fh.read()
    else:
        current = b''

    if current != content:
        with open(path, 'wb') as fh:
            fh.write(content)

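# Hedged usage sketch (not part of the original script): because the file is
# only rewritten when its content actually differs, mtime-based build tools
# will not rebuild artifacts that depend on it, e.g.:
#
#     write_if_changed('mercurial/__version__.py', b'version = b"6.8"\n')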

scripts = ['hg']
if os.name == 'nt':
    # We remove hg.bat if we are able to build hg.exe.
    scripts.append('contrib/win32/hg.bat')


def cancompile(cc, code):
    tmpdir = tempfile.mkdtemp(prefix='hg-install-')
    devnull = oldstderr = None
    try:
        fname = os.path.join(tmpdir, 'testcomp.c')
        f = open(fname, 'w')
        f.write(code)
        f.close()
        # Redirect stderr to /dev/null to hide any error messages
        # from the compiler.
        # This will have to be changed if we ever have to check
        # for a function on Windows.
        devnull = open('/dev/null', 'w')
        oldstderr = os.dup(sys.stderr.fileno())
        os.dup2(devnull.fileno(), sys.stderr.fileno())
        objects = cc.compile([fname], output_dir=tmpdir)
        cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
        return True
    except Exception:
        return False
    finally:
        if oldstderr is not None:
            os.dup2(oldstderr, sys.stderr.fileno())
        if devnull is not None:
            devnull.close()
        shutil.rmtree(tmpdir)


# simplified version of distutils.ccompiler.CCompiler.has_function
# that actually removes its temporary files.
def hasfunction(cc, funcname):
    code = 'int main(void) { %s(); }\n' % funcname
    return cancompile(cc, code)


def hasheader(cc, headername):
    code = '#include <%s>\nint main(void) { return 0; }\n' % headername
    return cancompile(cc, code)


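# Hedged example (not in the original script): probes like these typically
# gate optional C code on toolchain capabilities, along the lines of:
#
#     cc = new_compiler()
#     if hasfunction(cc, 'strlcpy'):
#         extra_cflags.append('-DHAVE_STRLCPY')  # hypothetical flag list
#     if hasheader(cc, 'linux/magic.h'):
#         extra_cflags.append('-DHAVE_LINUX_MAGIC_H')
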
# py2exe needs to be installed to work
try:
    import py2exe

    py2exe.patch_distutils()
    py2exeloaded = True
    # import py2exe's patched Distribution class
    from distutils.core import Distribution
except ImportError:
    py2exeloaded = False


def runcmd(cmd, env, cwd=None):
    p = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
    )
    out, err = p.communicate()
    return p.returncode, out, err


class hgcommand:
    def __init__(self, cmd, env):
        self.cmd = cmd
        self.env = env

    def __repr__(self):
        return f"<hgcommand cmd={self.cmd} env={self.env}>"

    def run(self, args):
        cmd = self.cmd + args
        returncode, out, err = runcmd(cmd, self.env)
        err = filterhgerr(err)
        if err:
            print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
            print(err, file=sys.stderr)
        if returncode != 0:
            print(
                "non-zero return code from '%s': %d"
                % (' '.join(cmd), returncode),
                file=sys.stderr,
            )
            return b''
        return out


def filterhgerr(err):
    # If root is executing setup.py, but the repository is owned by
    # another user (as in "sudo python setup.py install") we will get
    # trust warnings since the .hg/hgrc file is untrusted. That is
    # fine, we don't want to load it anyway. Python may warn about
    # a missing __init__.py in mercurial/locale, we also ignore that.
    err = [
        e
        for e in err.splitlines()
        if (
            not e.startswith(b'not trusting file')
            and not e.startswith(b'warning: Not importing')
            and not (
                e.startswith(b'obsolete feature not enabled')
                or e.startswith(b'"obsolete" feature not enabled')
            )
            and not e.startswith(b'*** failed to import extension')
            and not e.startswith(b'devel-warn:')
            and not (
                e.startswith(b'(third party extension')
                and e.endswith(b'or newer of Mercurial; disabling)')
            )
        )
    ]
    return b'\n'.join(b' ' + e for e in err)
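
# Hedged illustration (not from the original script): filterhgerr drops
# harmless noise such as
#     b'not trusting file .hg/hgrc from untrusted user root, group users'
# so that hgcommand.run above only surfaces stderr lines that signal real
# problems.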


def findhg():
    """Try to figure out how we should invoke hg for examining the local
    repository contents.

    Returns an hgcommand object."""
    # By default, prefer the "hg" command in the user's path. This was
    # presumably the hg command that the user used to create this repository.
    #
    # This repository may require extensions or other settings that would not
    # be enabled by running the hg script directly from this local repository.
    hgenv = os.environ.copy()
    # Use HGPLAIN to disable hgrc settings that would change output formatting,
    # and disable localization for the same reasons.
    hgenv['HGPLAIN'] = '1'
    hgenv['LANGUAGE'] = 'C'
    hgcmd = ['hg']
    # Run a simple "hg log" command just to see if using hg from the user's
    # path works and can successfully interact with this repository. Windows
    # gives precedence to hg.exe in the current directory, so fall back to the
    # python invocation of local hg, where pythonXY.dll can always be found.
    check_cmd = ['log', '-r.', '-Ttest']
    attempts = []

    def attempt(cmd, env):
        try:
            retcode, out, err = runcmd(cmd, env)
            res = (True, retcode, out, err)
            if retcode == 0 and not filterhgerr(err):
                return True
        except EnvironmentError as e:
            res = (False, e)
        attempts.append((cmd, res))
        return False

    if os.name != 'nt' or not os.path.exists("hg.exe"):
        if attempt(hgcmd + check_cmd, hgenv):
            return hgcommand(hgcmd, hgenv)

    # Fall back to trying the local hg installation (pure python)
    repo_hg = os.path.join(os.path.dirname(__file__), 'hg')
    hgenv = localhgenv()
    hgcmd = [sys.executable, repo_hg]
    if attempt(hgcmd + check_cmd, hgenv):
        return hgcommand(hgcmd, hgenv)
    # Fall back to trying the local hg installation (whatever we can)
    hgenv = localhgenv(pure_python=False)
    hgcmd = [sys.executable, repo_hg]
    if attempt(hgcmd + check_cmd, hgenv):
        return hgcommand(hgcmd, hgenv)

    eprint("/!\\")
    eprint(r"/!\ Unable to find a working hg binary")
    eprint(r"/!\ Version cannot be extracted from the repository")
    eprint(r"/!\ Re-run the setup once a first version is built")
    eprint(r"/!\ Attempts:")
    for i, e in enumerate(attempts):
        eprint(r"/!\ attempt #%d:" % (i))
        eprint(r"/!\ cmd: ", e[0])
        res = e[1]
        if res[0]:
            eprint(r"/!\ return code:", res[1])
            eprint("/!\\ std output:\n%s" % (res[2].decode()), end="")
            eprint("/!\\ std error:\n%s" % (res[3].decode()), end="")
        else:
            eprint(r"/!\ exception: ", res[1])
    return None


def localhgenv(pure_python=True):
    """Get an environment dictionary to use for invoking or importing
    mercurial from the local repository."""
    # Execute hg out of this directory with a custom environment which takes
    # care to not use any hgrc files and do no localization.
    env = {
        'HGRCPATH': '',
        'LANGUAGE': 'C',
        'PATH': '',
    }  # make pypi modules that use os.environ['PATH'] happy
    if pure_python:
        env['HGMODULEPOLICY'] = 'py'
    if 'LD_LIBRARY_PATH' in os.environ:
        env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
    if 'SystemRoot' in os.environ:
        # SystemRoot is required by Windows to load various DLLs. See:
        # https://bugs.python.org/issue13524#msg148850
        env['SystemRoot'] = os.environ['SystemRoot']
    return env
357
357
358
358
359 version = ''
359 version = ''
360
360
361
361
362 def _try_get_version():
362 def _try_get_version():
363 hg = findhg()
363 hg = findhg()
364 if hg is None:
364 if hg is None:
365 return ''
365 return ''
366 hgid = None
366 hgid = None
367 numerictags = []
367 numerictags = []
368 cmd = ['log', '-r', '.', '--template', '{tags}\n']
368 cmd = ['log', '-r', '.', '--template', '{tags}\n']
369 pieces = sysstr(hg.run(cmd)).split()
369 pieces = sysstr(hg.run(cmd)).split()
370 numerictags = [t for t in pieces if t[0:1].isdigit()]
370 numerictags = [t for t in pieces if t[0:1].isdigit()]
371 hgid = sysstr(hg.run(['id', '-i'])).strip()
371 hgid = sysstr(hg.run(['id', '-i'])).strip()
372 if hgid.count('+') == 2:
372 if hgid.count('+') == 2:
373 hgid = hgid.replace("+", ".", 1)
373 hgid = hgid.replace("+", ".", 1)
374 if not hgid:
374 if not hgid:
375 eprint("/!\\")
375 eprint("/!\\")
376 eprint(r"/!\ Unable to determine hg version from local repository")
376 eprint(r"/!\ Unable to determine hg version from local repository")
377 eprint(r"/!\ Failed to retrieve current revision tags")
377 eprint(r"/!\ Failed to retrieve current revision tags")
378 return ''
378 return ''
379 if numerictags: # tag(s) found
379 if numerictags: # tag(s) found
380 version = numerictags[-1]
380 version = numerictags[-1]
381 if hgid.endswith('+'): # propagate the dirty status to the tag
381 if hgid.endswith('+'): # propagate the dirty status to the tag
382 version += '+'
382 version += '+'
383 else: # no tag found on the checked out revision
383 else: # no tag found on the checked out revision
384 ltagcmd = ['log', '--rev', 'wdir()', '--template', '{latesttag}']
384 ltagcmd = ['log', '--rev', 'wdir()', '--template', '{latesttag}']
385 ltag = sysstr(hg.run(ltagcmd))
385 ltag = sysstr(hg.run(ltagcmd))
        if not ltag:
            eprint("/!\\")
            eprint(r"/!\ Unable to determine hg version from local repository")
            eprint(
                r"/!\ Failed to retrieve current revision distance to latest tag"
            )
            return ''
        changessincecmd = [
            'log',
            '-T',
            'x\n',
            '-r',
            "only(parents(),'%s')" % ltag,
        ]
        changessince = len(hg.run(changessincecmd).splitlines())
        version = '%s+hg%s.%s' % (ltag, changessince, hgid)
    if version.endswith('+'):
        version = version[:-1] + 'local' + time.strftime('%Y%m%d')
    return version


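# Example final version strings (tag names and hashes illustrative):
#
#     6.5.2                        clean checkout of tag 6.5.2
#     6.5.2local20240101           tag 6.5.2 plus uncommitted changes (a
#                                  trailing '+' is rewritten above to
#                                  'local' + current date)
#     6.5.2+hg24.f3a1b2c3d4e5      24 commits after latest tag 6.5.2, at
#                                  short hash f3a1b2c3d4e5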
if os.path.isdir('.hg'):
    version = _try_get_version()
elif os.path.exists('.hg_archival.txt'):
    kw = dict(
        [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
    )
    if 'tag' in kw:
        version = kw['tag']
    elif 'latesttag' in kw:
        if 'changessincelatesttag' in kw:
            version = (
                '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw
            )
        else:
            version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw
    else:
        version = '0+hg' + kw.get('node', '')[:12]
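# For reference, `hg archive` writes a .hg_archival.txt that looks roughly
# like this (field values illustrative):
#
#     repo: 82e55d328c8ca4ee16520036c0aaf03015a0f1a6
#     node: f3a1b2c3d4e5a6b7c8d9e0f1a2b3c4d5e6f7a8b9
#     branch: stable
#     latesttag: 6.5.2
#     latesttagdistance: 24
#     changessincelatesttag: 26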
elif os.path.exists('mercurial/__version__.py'):
    with open('mercurial/__version__.py') as f:
        data = f.read()
    version = re.search('version = b"(.*)"', data).group(1)
if not version:
    if os.environ.get("MERCURIAL_SETUP_MAKE_LOCAL") == "1":
        version = "0.0+0"
        eprint("/!\\")
        eprint(r"/!\ Using '0.0+0' as the default version")
        eprint(r"/!\ Re-run make local once that first version is built")
        eprint("/!\\")
    else:
        eprint("/!\\")
        eprint(r"/!\ Could not determine the Mercurial version")
        eprint(r"/!\ You need to build a local version first")
        eprint(r"/!\ Run `make local` and try again")
        eprint("/!\\")
        msg = "Run `make local` first to get a working local version"
        raise SystemExit(msg)

versionb = version
if not isinstance(versionb, bytes):
    versionb = versionb.encode('ascii')

write_if_changed(
    'mercurial/__version__.py',
    b''.join(
        [
            b'# this file is autogenerated by setup.py\n'
            b'version = b"%s"\n' % versionb,
        ]
    ),
)


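# The generated mercurial/__version__.py then consists of just two lines,
# e.g. (version value illustrative):
#
#     # this file is autogenerated by setup.py
#     version = b"6.5.2+hg24.f3a1b2c3d4e5"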
class hgbuild(build):
    # Insert hgbuildmo first so that files in mercurial/locale/ are found
    # when build_py is run next.
    sub_commands = [('build_mo', None)] + build.sub_commands


class hgbuildmo(build):
    description = "build translations (.mo files)"

    def run(self):
        if not find_executable('msgfmt'):
            self.warn(
                "could not find msgfmt executable, no translations "
                "will be built"
            )
            return

        podir = 'i18n'
        if not os.path.isdir(podir):
            self.warn("could not find %s/ directory" % podir)
            return

        join = os.path.join
        for po in os.listdir(podir):
            if not po.endswith('.po'):
                continue
            pofile = join(podir, po)
            modir = join('locale', po[:-3], 'LC_MESSAGES')
            mofile = join(modir, 'hg.mo')
            mobuildfile = join('mercurial', mofile)
            cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
            if sys.platform != 'sunos5':
                # msgfmt on Solaris does not know about -c
                cmd.append('-c')
            self.mkpath(join('mercurial', modir))
            self.make_file([pofile], mobuildfile, spawn, (cmd,))


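# For a hypothetical i18n/de.po, the loop above spawns something like:
#
#     msgfmt -v -o mercurial/locale/de/LC_MESSAGES/hg.mo i18n/de.po -c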
class hgdist(Distribution):
    pure = False
    rust = False
    no_rust = False
    cffi = ispypy

    global_options = Distribution.global_options + [
        ('pure', None, "use pure (slow) Python code instead of C extensions"),
        ('rust', None, "use Rust extensions additionally to C extensions"),
        (
            'no-rust',
            None,
            "do not use Rust extensions additionally to C extensions",
        ),
    ]

    negative_opt = Distribution.negative_opt.copy()
    boolean_options = ['pure', 'rust', 'no-rust']
    negative_opt['no-rust'] = 'rust'

    def _set_command_options(self, command_obj, option_dict=None):
        # Not all distutils versions in the wild have boolean_options.
        # This should be cleaned up when we're Python 3 only.
        command_obj.boolean_options = (
            getattr(command_obj, 'boolean_options', []) + self.boolean_options
        )
        return Distribution._set_command_options(
            self, command_obj, option_dict=option_dict
        )

    def parse_command_line(self):
        ret = Distribution.parse_command_line(self)
        if not (self.rust or self.no_rust):
            hgrustext = os.environ.get('HGWITHRUSTEXT')
            # TODO record it for proper rebuild upon changes
            # (see mercurial/__modulepolicy__.py)
            if hgrustext != 'cpython' and hgrustext is not None:
                if hgrustext:
                    msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
                    print(msg, file=sys.stderr)
                hgrustext = None
            self.rust = hgrustext is not None
            self.no_rust = not self.rust
        return ret

    def has_ext_modules(self):
        # self.ext_modules is emptied in hgbuildpy.finalize_options which is
        # too late for some cases
        return not self.pure and Distribution.has_ext_modules(self)


# This is ugly as a one-liner. So use a variable.
buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
buildextnegops['no-zstd'] = 'zstd'
buildextnegops['no-rust'] = 'rust'


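# With negative_opt mappings like the above, distutils treats each pair of
# flags as a single boolean: e.g. `setup.py build_ext --no-rust` stores
# rust = 0, while `--rust` stores rust = 1 (invocations illustrative).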
class hgbuildext(build_ext):
    user_options = build_ext.user_options + [
        ('zstd', None, 'compile zstd bindings [default]'),
        ('no-zstd', None, 'do not compile zstd bindings'),
        (
            'rust',
            None,
            'compile Rust extensions if they are in use '
            '(requires Cargo) [default]',
        ),
        ('no-rust', None, 'do not compile Rust extensions'),
    ]

    boolean_options = build_ext.boolean_options + ['zstd', 'rust']
    negative_opt = buildextnegops

    def initialize_options(self):
        self.zstd = True
        self.rust = True

        return build_ext.initialize_options(self)

    def finalize_options(self):
        # Unless overridden by the end user, build extensions in parallel.
        # Only influences behavior on Python 3.5+.
        if getattr(self, 'parallel', None) is None:
            self.parallel = True

        return build_ext.finalize_options(self)

    def build_extensions(self):
        ruststandalones = [
            e for e in self.extensions if isinstance(e, RustStandaloneExtension)
        ]
        self.extensions = [
            e for e in self.extensions if e not in ruststandalones
        ]
        # Filter out zstd if disabled via argument.
        if not self.zstd:
            self.extensions = [
                e for e in self.extensions if e.name != 'mercurial.zstd'
            ]

        # Build Rust standalone extensions if they will be used and their
        # build is not explicitly disabled (as it would be for an external
        # build, e.g. by a Linux distribution).
        if self.distribution.rust and self.rust:
            if not sys.platform.startswith('linux'):
                self.warn(
                    "rust extensions have only been tested on Linux "
                    "and may not behave correctly on other platforms"
                )

            for rustext in ruststandalones:
                rustext.build('' if self.inplace else self.build_lib)

        return build_ext.build_extensions(self)

    def build_extension(self, ext):
        if (
            self.distribution.rust
            and self.rust
            and isinstance(ext, RustExtension)
        ):
            ext.rustbuild()
        try:
            build_ext.build_extension(self, ext)
        except CCompilerError:
            if not getattr(ext, 'optional', False):
                raise
            log.warn(
                "Failed to build optional extension '%s' (skipping)", ext.name
            )


class hgbuildscripts(build_scripts):
    def run(self):
        if os.name != 'nt' or self.distribution.pure:
            return build_scripts.run(self)

        exebuilt = False
        try:
            self.run_command('build_hgexe')
            exebuilt = True
        except (DistutilsError, CCompilerError):
            log.warn('failed to build optional hg.exe')

        if exebuilt:
            # Copying hg.exe to the scripts build directory ensures it is
            # installed by the install_scripts command.
            hgexecommand = self.get_finalized_command('build_hgexe')
            dest = os.path.join(self.build_dir, 'hg.exe')
            self.mkpath(self.build_dir)
            self.copy_file(hgexecommand.hgexepath, dest)

            # Remove hg.bat because it is redundant with hg.exe.
            self.scripts.remove('contrib/win32/hg.bat')

        return build_scripts.run(self)


class hgbuildpy(build_py):
    def finalize_options(self):
        build_py.finalize_options(self)

        if self.distribution.pure:
            self.distribution.ext_modules = []
        elif self.distribution.cffi:
            from mercurial.cffi import (
                bdiffbuild,
                mpatchbuild,
            )

            exts = [
                mpatchbuild.ffi.distutils_extension(),
                bdiffbuild.ffi.distutils_extension(),
            ]
            # cffi modules go here
            if sys.platform == 'darwin':
                from mercurial.cffi import osutilbuild

                exts.append(osutilbuild.ffi.distutils_extension())
            self.distribution.ext_modules = exts
        else:
            h = os.path.join(get_python_inc(), 'Python.h')
            if not os.path.exists(h):
                raise SystemExit(
                    'Python headers are required to build '
                    'Mercurial but weren\'t found in %s' % h
                )

    def run(self):
        basepath = os.path.join(self.build_lib, 'mercurial')
        self.mkpath(basepath)

        rust = self.distribution.rust
        if self.distribution.pure:
            modulepolicy = 'py'
        elif self.build_lib == '.':
            # an in-place build should run without rebuilding the C and
            # Rust extensions
            modulepolicy = 'rust+c-allow' if rust else 'allow'
        else:
            modulepolicy = 'rust+c' if rust else 'c'

        content = b''.join(
            [
                b'# this file is autogenerated by setup.py\n',
                b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
            ]
        )
        write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)

        build_py.run(self)


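# The generated mercurial/__modulepolicy__.py is a two-line module, e.g.
# for a default C-extension build:
#
#     # this file is autogenerated by setup.py
#     modulepolicy = b"c"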
class buildhgextindex(Command):
    description = 'generate prebuilt index of hgext (for frozen package)'
    user_options = []
    _indexfilename = 'hgext/__index__.py'

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        if os.path.exists(self._indexfilename):
            with open(self._indexfilename, 'w') as f:
                f.write('# empty\n')

        # With no extensions enabled here, disabled() lists everything.
        code = (
            'import pprint; from mercurial import extensions; '
            'ext = extensions.disabled();'
            'ext.pop("__index__", None);'
            'pprint.pprint(ext)'
        )
        returncode, out, err = runcmd(
            [sys.executable, '-c', code], localhgenv()
        )
        if err or returncode != 0:
            raise DistutilsExecError(err)

        with open(self._indexfilename, 'wb') as f:
            f.write(b'# this file is autogenerated by setup.py\n')
            f.write(b'docs = ')
            f.write(out)


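# The resulting hgext/__index__.py holds a dict mapping extension names to
# their summaries, roughly of this shape (entries illustrative):
#
#     # this file is autogenerated by setup.py
#     docs = {'churn': 'command to display statistics about repository '
#                      'history',
#             ...}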
class buildhgexe(build_ext):
    description = 'compile hg.exe from mercurial/exewrapper.c'

    LONG_PATHS_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
    <security>
      <requestedPrivileges>
        <requestedExecutionLevel
          level="asInvoker"
          uiAccess="false"
        />
      </requestedPrivileges>
    </security>
  </trustInfo>
  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
    <application>
      <!-- Windows Vista -->
      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
      <!-- Windows 7 -->
      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
      <!-- Windows 8 -->
      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
      <!-- Windows 8.1 -->
      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
      <!-- Windows 10 and Windows 11 -->
      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
    </application>
  </compatibility>
  <application xmlns="urn:schemas-microsoft-com:asm.v3">
    <windowsSettings
        xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
      <ws2:longPathAware>true</ws2:longPathAware>
    </windowsSettings>
  </application>
  <dependency>
    <dependentAssembly>
      <assemblyIdentity type="win32"
                        name="Microsoft.Windows.Common-Controls"
                        version="6.0.0.0"
                        processorArchitecture="*"
                        publicKeyToken="6595b64144ccf1df"
                        language="*" />
    </dependentAssembly>
  </dependency>
</assembly>
"""

    def initialize_options(self):
        build_ext.initialize_options(self)

    def build_extensions(self):
        if os.name != 'nt':
            return
        if isinstance(self.compiler, HackedMingw32CCompiler):
            self.compiler.compiler_so = self.compiler.compiler  # no -mdll
            self.compiler.dll_libraries = []  # no -lmsrvc90

        pythonlib = None

        dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
        self.hgtarget = os.path.join(dirname, 'hg')

        if getattr(sys, 'dllhandle', None):
            # Different Python installs can have different Python library
            # names. e.g. the official CPython distribution uses pythonXY.dll
            # and MinGW uses libpythonX.Y.dll.
            _kernel32 = ctypes.windll.kernel32
            _kernel32.GetModuleFileNameA.argtypes = [
                ctypes.c_void_p,
                ctypes.c_void_p,
                ctypes.c_ulong,
            ]
            _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
            size = 1000
            buf = ctypes.create_string_buffer(size + 1)
            filelen = _kernel32.GetModuleFileNameA(
                sys.dllhandle, ctypes.byref(buf), size
            )

            if filelen > 0 and filelen != size:
                dllbasename = os.path.basename(buf.value)
                if not dllbasename.lower().endswith(b'.dll'):
                    raise SystemExit(
                        'Python DLL does not end with .dll: %s' % dllbasename
                    )
                pythonlib = dllbasename[:-4]

                # Copy the pythonXY.dll next to the binary so that it runs
                # without tampering with PATH.
                dest = os.path.join(
                    os.path.dirname(self.hgtarget),
                    os.fsdecode(dllbasename),
                )

                if not os.path.exists(dest):
                    shutil.copy(buf.value, dest)

                # Also overwrite python3.dll so that hgext.git is usable.
                # TODO: also handle the MSYS flavor
                python_x = os.path.join(
                    os.path.dirname(os.fsdecode(buf.value)),
                    "python3.dll",
                )

                if os.path.exists(python_x):
                    dest = os.path.join(
                        os.path.dirname(self.hgtarget),
                        os.path.basename(python_x),
                    )

                    shutil.copy(python_x, dest)

        if not pythonlib:
            log.warn(
                'could not determine Python DLL filename; assuming pythonXY'
            )

            hv = sys.hexversion
            pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)

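            # For example, on CPython 3.11.4 sys.hexversion is 0x030B04F0,
            # so hv >> 24 == 3 and (hv >> 16) & 0xFF == 11, giving the
            # fallback name b'python311'.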
        log.info('using %s as Python library name' % pythonlib)
        with open('mercurial/hgpythonlib.h', 'wb') as f:
            f.write(b'/* this file is autogenerated by setup.py */\n')
            f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)

        objects = self.compiler.compile(
            ['mercurial/exewrapper.c'],
            output_dir=self.build_temp,
            macros=[('_UNICODE', None), ('UNICODE', None)],
        )
        self.compiler.link_executable(
            objects, self.hgtarget, libraries=[], output_dir=self.build_temp
        )

        self.addlongpathsmanifest()

    def addlongpathsmanifest(self):
        """Add manifest pieces so that hg.exe understands long paths

        Why resource #1 must be used for .exe manifests, I don't know, and
        I wasn't able to find an explanation for mortals. But it seems to
        work.
        """
        exefname = self.compiler.executable_filename(self.hgtarget)
        fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
        os.close(fdauto)
        with open(manfname, 'w', encoding="UTF-8") as f:
            f.write(self.LONG_PATHS_MANIFEST)
        log.info("long paths manifest is written to '%s'" % manfname)
        outputresource = '-outputresource:%s;#1' % exefname
        log.info("running mt.exe to update hg.exe's manifest in-place")

        self.spawn(
            [
                self.compiler.mt,
                '-nologo',
                '-manifest',
                manfname,
                outputresource,
            ]
        )
        log.info("done updating hg.exe's manifest")
        os.remove(manfname)

    @property
    def hgexepath(self):
        dir = os.path.dirname(self.get_ext_fullpath('dummy'))
        return os.path.join(self.build_temp, dir, 'hg.exe')


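# The spawn above is equivalent to running, with illustrative file names
# (mkstemp picks the actual manifest name):
#
#     mt.exe -nologo -manifest <tmp>.hg.exe.manifest -outputresource:hg.exe;#1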
class hgbuilddoc(Command):
    description = 'build documentation'
    user_options = [
        ('man', None, 'generate man pages'),
        ('html', None, 'generate html pages'),
    ]

    def initialize_options(self):
        self.man = None
        self.html = None

    def finalize_options(self):
        # If --man or --html are set, only generate what we're told to.
        # Otherwise generate everything.
        have_subset = self.man is not None or self.html is not None

        if have_subset:
            self.man = True if self.man else False
            self.html = True if self.html else False
        else:
            self.man = True
            self.html = True

    def run(self):
        def normalizecrlf(p):
            with open(p, 'rb') as fh:
                orig = fh.read()

            if b'\r\n' not in orig:
                return

            log.info('normalizing %s to LF line endings' % p)
            with open(p, 'wb') as fh:
                fh.write(orig.replace(b'\r\n', b'\n'))

        def gentxt(root):
            txt = 'doc/%s.txt' % root
            log.info('generating %s' % txt)
            res, out, err = runcmd(
                [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
            )
            if res:
                raise SystemExit(
                    'error running gendoc.py: %s'
                    % '\n'.join([sysstr(out), sysstr(err)])
                )

            with open(txt, 'wb') as fh:
                fh.write(out)

        def gengendoc(root):
            gendoc = 'doc/%s.gendoc.txt' % root

            log.info('generating %s' % gendoc)
            res, out, err = runcmd(
                [sys.executable, 'gendoc.py', '%s.gendoc' % root],
                os.environ,
                cwd='doc',
            )
            if res:
                raise SystemExit(
                    'error running gendoc: %s'
                    % '\n'.join([sysstr(out), sysstr(err)])
                )

            with open(gendoc, 'wb') as fh:
                fh.write(out)

        def genman(root):
            log.info('generating doc/%s' % root)
            res, out, err = runcmd(
                [
                    sys.executable,
                    'runrst',
                    'hgmanpage',
                    '--halt',
                    'warning',
                    '--strip-elements-with-class',
                    'htmlonly',
                    '%s.txt' % root,
                    root,
                ],
                os.environ,
                cwd='doc',
            )
            if res:
                raise SystemExit(
                    'error running runrst: %s'
                    % '\n'.join([sysstr(out), sysstr(err)])
                )

            normalizecrlf('doc/%s' % root)

        def genhtml(root):
            log.info('generating doc/%s.html' % root)
            res, out, err = runcmd(
                [
                    sys.executable,
                    'runrst',
                    'html',
                    '--halt',
                    'warning',
                    '--link-stylesheet',
                    '--stylesheet-path',
                    'style.css',
                    '%s.txt' % root,
                    '%s.html' % root,
                ],
                os.environ,
                cwd='doc',
            )
            if res:
                raise SystemExit(
                    'error running runrst: %s'
                    % '\n'.join([sysstr(out), sysstr(err)])
                )

            normalizecrlf('doc/%s.html' % root)

        # This logic is duplicated in doc/Makefile.
        sources = {
            f
            for f in os.listdir('mercurial/helptext')
            if re.search(r'[0-9]\.txt$', f)
        }

        # common.txt is a one-off.
        gentxt('common')

        for source in sorted(sources):
            assert source[-4:] == '.txt'
            root = source[:-4]

            gentxt(root)
            gengendoc(root)

            if self.man:
                genman(root)
            if self.html:
                genhtml(root)


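# The regex above keeps only sources whose names carry a man section
# suffix, e.g. hg.1.txt or hgrc.5.txt (so root becomes 'hg.1' or 'hgrc.5'),
# while common.txt is generated explicitly above.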
class hginstall(install):
    user_options = install.user_options + [
        (
            'old-and-unmanageable',
            None,
            'noop, present for eggless setuptools compat',
        ),
        (
            'single-version-externally-managed',
            None,
            'noop, present for eggless setuptools compat',
        ),
    ]

    sub_commands = install.sub_commands + [
        ('install_completion', lambda self: True)
    ]

    # Also helps setuptools not be sad while we refuse to create eggs.
    single_version_externally_managed = True

    def get_sub_commands(self):
        # Screen out egg related commands to prevent egg generation. But allow
        # mercurial.egg-info generation, since that is part of modern
        # packaging.
        excl = {'bdist_egg'}
        return filter(lambda x: x not in excl, install.get_sub_commands(self))


class hginstalllib(install_lib):
    """
    This is a specialization of install_lib that replaces the copy_file used
    there so that it supports setting the mode of files after copying them,
    instead of just preserving the mode that the files originally had. If your
    system has a umask of something like 027, preserving the permissions when
    copying will lead to a broken install.

    Note that just passing keep_permissions=False to copy_file would be
    insufficient, as it might still be applying a umask.
    """

    def run(self):
        realcopyfile = file_util.copy_file

        def copyfileandsetmode(*args, **kwargs):
            src, dst = args[0], args[1]
            dst, copied = realcopyfile(*args, **kwargs)
            if copied:
                st = os.stat(src)
                # Persist executable bit (apply it to group and other if user
                # has it)
                if st[stat.ST_MODE] & stat.S_IXUSR:
                    setmode = int('0755', 8)
                else:
                    setmode = int('0644', 8)
                m = stat.S_IMODE(st[stat.ST_MODE])
                m = (m & ~int('0777', 8)) | setmode
                os.chmod(dst, m)

        file_util.copy_file = copyfileandsetmode
        try:
            install_lib.run(self)
        finally:
            file_util.copy_file = realcopyfile


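# Worked example for the mode fixup above: with a umask of 027, a source
# file of mode 0o750 has S_IXUSR set, so setmode is 0o755; clearing the
# old permission bits with ~0o777 and OR-ing in setmode installs the file
# as 0o755 regardless of the umask.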
class hginstallscripts(install_scripts):
    """
    This is a specialization of install_scripts that replaces the @LIBDIR@ with
    the configured directory for modules. If possible, the path is made relative
    to the directory for scripts.
    """

    def initialize_options(self):
        install_scripts.initialize_options(self)

        self.install_lib = None

    def finalize_options(self):
        install_scripts.finalize_options(self)
        self.set_undefined_options('install', ('install_lib', 'install_lib'))

    def run(self):
        install_scripts.run(self)

        # It only makes sense to replace @LIBDIR@ with the install path if
        # the install path is known. For wheels, the logic below calculates
        # the libdir to be "../..". This is because the internal layout of a
        # wheel archive looks like:
        #
        #   mercurial-3.6.1.data/scripts/hg
        #   mercurial/__init__.py
        #
        # When installing wheels, the subdirectories of the "<pkg>.data"
        # directory are translated to system local paths and files therein
        # are copied in place. The mercurial/* files are installed into the
        # site-packages directory. However, the site-packages directory
        # isn't known until wheel install time. This means we have no clue
        # at wheel generation time what the installed site-packages directory
        # will be. And, wheels don't appear to provide the ability to register
        # custom code to run during wheel installation. This all means that
        # we can't reliably set the libdir in wheels: the default behavior
        # of looking in sys.path must do.

        if (
            os.path.splitdrive(self.install_dir)[0]
            != os.path.splitdrive(self.install_lib)[0]
        ):
            # can't make relative paths from one drive to another, so use an
            # absolute path instead
            libdir = self.install_lib
        else:
            libdir = os.path.relpath(self.install_lib, self.install_dir)

        for outfile in self.outfiles:
            with open(outfile, 'rb') as fp:
                data = fp.read()

            # skip binary files
            if b'\0' in data:
                continue

            # During local installs, the shebang will be rewritten to the final
            # install path. During wheel packaging, the shebang has a special
            # value.
            if data.startswith(b'#!python'):
                log.info(
                    'not rewriting @LIBDIR@ in %s because install path '
                    'not known' % outfile
                )
                continue

            data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
            with open(outfile, 'wb') as fp:
                fp.write(data)


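# Sketch of the effect (paths illustrative): with scripts installed to
# /usr/local/bin and modules to /usr/local/lib/python3.11/site-packages,
# libdir is '../lib/python3.11/site-packages', and a line such as
#
#     libdir = '@LIBDIR@'
#
# in the installed hg script is rewritten to embed that relative path.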
class hginstallcompletion(Command):
    description = 'Install shell completion'

    def initialize_options(self):
        self.install_dir = None
        self.outputs = []

    def finalize_options(self):
        self.set_undefined_options(
            'install_data', ('install_dir', 'install_dir')
        )

    def get_outputs(self):
        return self.outputs

    def run(self):
        for src, dir_path, dest in (
            (
                'bash_completion',
                ('share', 'bash-completion', 'completions'),
                'hg',
            ),
            ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
        ):
            dir = os.path.join(self.install_dir, *dir_path)
            self.mkpath(dir)

            dest = os.path.join(dir, dest)
            self.outputs.append(dest)
            self.copy_file(os.path.join('contrib', src), dest)


# virtualenv installs custom distutils/__init__.py and
# distutils/distutils.cfg files which essentially proxy back to the
# "real" distutils in the main Python install. The presence of this
# directory causes py2exe to pick up the "hacked" distutils package
# from the virtualenv and "import distutils" will fail from the py2exe
# build because the "real" distutils files can't be located.
#
# We work around this by monkeypatching the py2exe code finding Python
# modules to replace the found virtualenv distutils modules with the
# original versions via filesystem scanning. This is a bit hacky. But
# it allows us to use virtualenvs for py2exe packaging, which is more
# deterministic and reproducible.
#
# It's worth noting that the common StackOverflow suggestions for this
# problem involve copying the original distutils files into the
# virtualenv or into the staging directory after setup() is invoked.
# The former is very brittle and can easily break setup(). Our hacking
# of the found modules routine has a similar result as copying the files
# manually. But it makes fewer assumptions about how py2exe works and
# is less brittle.

# This only catches virtualenvs made with virtualenv (as opposed to
# venv, which is likely what Python 3 uses).
py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None

if py2exehacked:
    from distutils.command.py2exe import py2exe as buildpy2exe
    from py2exe.mf import Module as py2exemodule

    class hgbuildpy2exe(buildpy2exe):
        def find_needed_modules(self, mf, files, modules):
            res = buildpy2exe.find_needed_modules(self, mf, files, modules)

            # Replace virtualenv's distutils modules with the real ones.
            modules = {}
            for k, v in res.modules.items():
                if k != 'distutils' and not k.startswith('distutils.'):
                    modules[k] = v

            res.modules = modules

            import opcode

            distutilsreal = os.path.join(
                os.path.dirname(opcode.__file__), 'distutils'
            )

            for root, dirs, files in os.walk(distutilsreal):
                for f in sorted(files):
                    if not f.endswith('.py'):
                        continue

                    full = os.path.join(root, f)

                    parents = ['distutils']

                    if root != distutilsreal:
                        rel = os.path.relpath(root, distutilsreal)
                        parents.extend(p for p in rel.split(os.sep))

                    modname = '%s.%s' % ('.'.join(parents), f[:-3])

                    if modname.startswith('distutils.tests.'):
                        continue

                    if modname.endswith('.__init__'):
                        modname = modname[: -len('.__init__')]
                        path = os.path.dirname(full)
                    else:
                        path = None

                    res.modules[modname] = py2exemodule(
                        modname, full, path=path
                    )

            if 'distutils' not in res.modules:
                raise SystemExit('could not find distutils modules')

            return res


cmdclass = {
    'build': hgbuild,
    'build_doc': hgbuilddoc,
    'build_mo': hgbuildmo,
    'build_ext': hgbuildext,
    'build_py': hgbuildpy,
    'build_scripts': hgbuildscripts,
    'build_hgextindex': buildhgextindex,
    'install': hginstall,
    'install_completion': hginstallcompletion,
    'install_lib': hginstalllib,
    'install_scripts': hginstallscripts,
    'build_hgexe': buildhgexe,
}

if py2exehacked:
    cmdclass['py2exe'] = hgbuildpy2exe

1326 'mercurial',
1326 'mercurial',
1327 'mercurial.admin',
1327 'mercurial.admin',
1328 'mercurial.cext',
1328 'mercurial.cext',
1329 'mercurial.cffi',
1329 'mercurial.cffi',
1330 'mercurial.defaultrc',
1330 'mercurial.defaultrc',
1331 'mercurial.dirstateutils',
1331 'mercurial.dirstateutils',
1332 'mercurial.helptext',
1332 'mercurial.helptext',
1333 'mercurial.helptext.internals',
1333 'mercurial.helptext.internals',
1334 'mercurial.hgweb',
1334 'mercurial.hgweb',
1335 'mercurial.interfaces',
1335 'mercurial.interfaces',
1336 'mercurial.pure',
1336 'mercurial.pure',
1337 'mercurial.stabletailgraph',
1337 'mercurial.stabletailgraph',
1338 'mercurial.templates',
1338 'mercurial.templates',
1339 'mercurial.thirdparty',
1339 'mercurial.thirdparty',
1340 'mercurial.thirdparty.attr',
1340 'mercurial.thirdparty.attr',
1341 'mercurial.thirdparty.tomli',
1341 'mercurial.thirdparty.tomli',
1342 'mercurial.thirdparty.zope',
1342 'mercurial.thirdparty.zope',
1343 'mercurial.thirdparty.zope.interface',
1343 'mercurial.thirdparty.zope.interface',
1344 'mercurial.upgrade_utils',
1344 'mercurial.upgrade_utils',
1345 'mercurial.utils',
1345 'mercurial.utils',
1346 'mercurial.revlogutils',
1346 'mercurial.revlogutils',
1347 'mercurial.testing',
1347 'mercurial.testing',
1348 'hgext',
1348 'hgext',
1349 'hgext.convert',
1349 'hgext.convert',
1350 'hgext.fsmonitor',
1350 'hgext.fsmonitor',
1351 'hgext.fastannotate',
1351 'hgext.fastannotate',
1352 'hgext.fsmonitor.pywatchman',
1352 'hgext.fsmonitor.pywatchman',
1353 'hgext.git',
1353 'hgext.git',
1354 'hgext.highlight',
1354 'hgext.highlight',
1355 'hgext.hooklib',
1355 'hgext.hooklib',
1356 'hgext.largefiles',
1356 'hgext.largefiles',
1357 'hgext.lfs',
1357 'hgext.lfs',
1358 'hgext.narrow',
1358 'hgext.narrow',
1359 'hgext.remotefilelog',
1359 'hgext.remotefilelog',
1360 'hgext.zeroconf',
1360 'hgext.zeroconf',
1361 'hgext3rd',
1361 'hgext3rd',
1362 'hgdemandimport',
1362 'hgdemandimport',
1363 ]
1363 ]
1364
1364
1365 for name in os.listdir(os.path.join('mercurial', 'templates')):
1365 for name in os.listdir(os.path.join('mercurial', 'templates')):
1366 if name != '__pycache__' and os.path.isdir(
1366 if name != '__pycache__' and os.path.isdir(
1367 os.path.join('mercurial', 'templates', name)
1367 os.path.join('mercurial', 'templates', name)
1368 ):
1368 ):
1369 packages.append('mercurial.templates.%s' % name)
1369 packages.append('mercurial.templates.%s' % name)
1370
1370
1371 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1371 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1372 # py2exe can't cope with namespace packages very well, so we have to
1372 # py2exe can't cope with namespace packages very well, so we have to
1373 # install any hgext3rd.* extensions that we want in the final py2exe
1373 # install any hgext3rd.* extensions that we want in the final py2exe
1374 # image here. This is gross, but you gotta do what you gotta do.
1374 # image here. This is gross, but you gotta do what you gotta do.
1375 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1375 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
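    # Hedged illustration (the package name is hypothetical, not part of this
    # file): building with
    #   HG_PY2EXE_EXTRA_INSTALL_PACKAGES='hgext3rd.myext' python setup.py py2exe
    # would append 'hgext3rd.myext' to the packages list above.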

common_depends = [
    'mercurial/bitmanipulation.h',
    'mercurial/compat.h',
    'mercurial/cext/util.h',
]
common_include_dirs = ['mercurial']

common_cflags = []

# MSVC 2008 still needs declarations at the top of a scope, but the Python 3.9
# headers contain declarations that are not at the top of a scope.
if os.name != 'nt' and sys.version_info[1] < 9:
    common_cflags = ['-Werror=declaration-after-statement']

osutil_cflags = []
osutil_ldflags = []

# platform specific macros
for plat, func in [('bsd', 'setproctitle')]:
    if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
        osutil_cflags.append('-DHAVE_%s' % func.upper())
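# Concretely: on a BSD-family platform whose C library provides
# setproctitle(), the probe above adds '-DHAVE_SETPROCTITLE' to osutil_cflags.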

for plat, macro, code in [
    (
        'bsd|darwin',
        'BSD_STATFS',
        '''
        #include <sys/param.h>
        #include <sys/mount.h>
        int main() { struct statfs s; return sizeof(s.f_fstypename); }
        ''',
    ),
    (
        'linux',
        'LINUX_STATFS',
        '''
        #include <linux/magic.h>
        #include <sys/vfs.h>
        int main() { struct statfs s; return sizeof(s.f_type); }
        ''',
    ),
]:
    if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
        osutil_cflags.append('-DHAVE_%s' % macro)
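# Likewise here: a successful compile of the Linux probe yields
# '-DHAVE_LINUX_STATFS', and of the BSD/macOS probe '-DHAVE_BSD_STATFS'.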

if sys.platform == 'darwin':
    osutil_ldflags += ['-framework', 'ApplicationServices']

if sys.platform == 'sunos5':
    osutil_ldflags += ['-lsocket']

xdiff_srcs = [
    'mercurial/thirdparty/xdiff/xdiffi.c',
    'mercurial/thirdparty/xdiff/xprepare.c',
    'mercurial/thirdparty/xdiff/xutils.c',
]

xdiff_headers = [
    'mercurial/thirdparty/xdiff/xdiff.h',
    'mercurial/thirdparty/xdiff/xdiffi.h',
    'mercurial/thirdparty/xdiff/xinclude.h',
    'mercurial/thirdparty/xdiff/xmacros.h',
    'mercurial/thirdparty/xdiff/xprepare.h',
    'mercurial/thirdparty/xdiff/xtypes.h',
    'mercurial/thirdparty/xdiff/xutils.h',
]


class RustCompilationError(CCompilerError):
    """Exception class for Rust compilation errors."""


class RustExtension(Extension):
    """Base class for concrete Rust Extension classes."""

    rusttargetdir = os.path.join('rust', 'target', 'release')

    def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
        Extension.__init__(self, mpath, sources, **kw)
        srcdir = self.rustsrcdir = os.path.join('rust', subcrate)

        # adding Rust source and control files to depends so that the extension
        # gets rebuilt if they've changed
        self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
        cargo_lock = os.path.join(srcdir, 'Cargo.lock')
        if os.path.exists(cargo_lock):
            self.depends.append(cargo_lock)
        for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
            self.depends.extend(
                os.path.join(dirpath, fname)
                for fname in fnames
                if os.path.splitext(fname)[1] == '.rs'
            )

    @staticmethod
    def rustdylibsuffix():
        """Return the suffix for shared libraries produced by rustc.

        See also: https://doc.rust-lang.org/reference/linkage.html
        """
        if sys.platform == 'darwin':
            return '.dylib'
        elif os.name == 'nt':
            return '.dll'
        else:
            return '.so'

    def rustbuild(self):
        env = os.environ.copy()
        if 'HGTEST_RESTOREENV' in env:
            # Mercurial tests change HOME to a temporary directory,
            # but, if installed with rustup, the Rust toolchain needs
            # HOME to be correct (otherwise the 'no default toolchain'
            # error message is issued and the build fails).
            # This happens currently with test-hghave.t, which does
            # invoke this build.

            # Unix only fix (os.path.expanduser not really reliable if
            # HOME is shadowed like this)
            import pwd

            env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir

        cargocmd = ['cargo', 'rustc', '--release']

        rust_features = env.get("HG_RUST_FEATURES")
        if rust_features:
            cargocmd.extend(('--features', rust_features))

        cargocmd.append('--')
        if sys.platform == 'darwin':
            cargocmd.extend(
                ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
            )
        try:
            subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
        except FileNotFoundError:
            raise RustCompilationError("Cargo not found")
        except PermissionError:
            raise RustCompilationError(
                "Cargo found, but permission to execute it is denied"
            )
        except subprocess.CalledProcessError:
            raise RustCompilationError(
                "Cargo failed. Working directory: %r, "
                "command: %r, environment: %r"
                % (self.rustsrcdir, cargocmd, env)
            )


class RustStandaloneExtension(RustExtension):
    def __init__(self, pydottedname, rustcrate, dylibname, **kw):
        RustExtension.__init__(
            self, pydottedname, [], dylibname, rustcrate, **kw
        )
        self.dylibname = dylibname

    def build(self, target_dir):
        self.rustbuild()
        target = [target_dir]
        target.extend(self.name.split('.'))
        target[-1] += DYLIB_SUFFIX
        target = os.path.join(*target)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        shutil.copy2(
            os.path.join(
                self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
            ),
            target,
        )
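        # Worked example (values assumed for illustration; DYLIB_SUFFIX is
        # defined elsewhere in this file): with name 'mercurial.rustext',
        # dylibname 'librusthg', and a '.so' suffix, this copies
        # rust/target/release/librusthg.so to <target_dir>/mercurial/rustext.so.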


extmodules = [
    Extension(
        'mercurial.cext.base85',
        ['mercurial/cext/base85.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.cext.bdiff',
        ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
    ),
    Extension(
        'mercurial.cext.mpatch',
        ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.cext.parsers',
        [
            'mercurial/cext/charencode.c',
            'mercurial/cext/dirs.c',
            'mercurial/cext/manifest.c',
            'mercurial/cext/parsers.c',
            'mercurial/cext/pathencode.c',
            'mercurial/cext/revlog.c',
        ],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags,
        depends=common_depends
        + [
            'mercurial/cext/charencode.h',
            'mercurial/cext/revlog.h',
        ],
    ),
    Extension(
        'mercurial.cext.osutil',
        ['mercurial/cext/osutil.c'],
        include_dirs=common_include_dirs,
        extra_compile_args=common_cflags + osutil_cflags,
        extra_link_args=osutil_ldflags,
        depends=common_depends,
    ),
    Extension(
        'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
        [
            'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
        ],
        extra_compile_args=common_cflags,
    ),
    Extension(
        'mercurial.thirdparty.sha1dc',
        [
            'mercurial/thirdparty/sha1dc/cext.c',
            'mercurial/thirdparty/sha1dc/lib/sha1.c',
            'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
        ],
        extra_compile_args=common_cflags,
    ),
    Extension(
        'hgext.fsmonitor.pywatchman.bser',
        ['hgext/fsmonitor/pywatchman/bser.c'],
        extra_compile_args=common_cflags,
    ),
    RustStandaloneExtension(
        'mercurial.rustext',
        'hg-cpython',
        'librusthg',
    ),
]


sys.path.insert(0, 'contrib/python-zstandard')
import setup_zstd

zstd = setup_zstd.get_c_extension(
    name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
)
zstd.extra_compile_args += common_cflags
extmodules.append(zstd)

try:
    from distutils import cygwinccompiler

    # the -mno-cygwin option has been deprecated for years
    mingw32compilerclass = cygwinccompiler.Mingw32CCompiler

    class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
        def __init__(self, *args, **kwargs):
            mingw32compilerclass.__init__(self, *args, **kwargs)
            for i in 'compiler compiler_so linker_exe linker_so'.split():
                try:
                    getattr(self, i).remove('-mno-cygwin')
                except ValueError:
                    pass

    cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
except ImportError:
    # the cygwinccompiler package is not available on some Python
    # distributions like the ones from the optware project for Synology
    # DiskStation boxes
    class HackedMingw32CCompiler:
        pass


if os.name == 'nt':
    # Allow compiler/linker flags to be added to Visual Studio builds. Passing
    # extra_link_args to distutils.extensions.Extension() doesn't have any
    # effect.
    try:
        # setuptools < 65.0
        from distutils import msvccompiler
    except ImportError:
        from distutils import _msvccompiler as msvccompiler

    msvccompilerclass = msvccompiler.MSVCCompiler

    class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
        def initialize(self):
            msvccompilerclass.initialize(self)
            # "warning LNK4197: export 'func' specified multiple times"
            self.ldflags_shared.append('/ignore:4197')
            self.ldflags_shared_debug.append('/ignore:4197')

    msvccompiler.MSVCCompiler = HackedMSVCCompiler

packagedata = {
    'mercurial': [
        'configitems.toml',
        'locale/*/LC_MESSAGES/hg.mo',
        'dummycert.pem',
    ],
    'mercurial.defaultrc': [
        '*.rc',
    ],
    'mercurial.helptext': [
        '*.txt',
    ],
    'mercurial.helptext.internals': [
        '*.txt',
    ],
    'mercurial.thirdparty.attr': [
        '*.pyi',
        'py.typed',
    ],
}


def ordinarypath(p):
    return p and p[0] != '.' and p[-1] != '~'
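# For instance, ordinarypath('map-cmdline.default') is truthy, while
# ordinarypath('.hidden') and ordinarypath('backup~') are falsy, so dotfiles
# and editor backups are filtered out of the template packagedata below.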


for root in ('templates',):
    for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
        packagename = curdir.replace(os.sep, '.')
        packagedata[packagename] = list(filter(ordinarypath, files))

datafiles = []

# distutils expects version to be str/unicode. Converting it to
# unicode on Python 2 still works because it won't contain any
# non-ascii bytes and will be implicitly converted back to bytes
# when operated on.
assert isinstance(version, str)
setupversion = version

extra = {}

py2exepackages = [
    'hgdemandimport',
    'hgext3rd',
    'hgext',
    'email',
    # implicitly imported per module policy
    # (cffi wouldn't be used as a frozen exe)
    'mercurial.cext',
    #'mercurial.cffi',
    'mercurial.pure',
]

py2exe_includes = []

py2exeexcludes = []
py2exedllexcludes = ['crypt32.dll']

if issetuptools:
    extra['python_requires'] = supportedpy

if py2exeloaded:
    extra['console'] = [
        {
            'script': 'hg',
            'copyright': 'Copyright (C) 2005-2024 Olivia Mackall and others',
            'product_version': version,
        }
    ]
    # Sub command of 'build' because 'py2exe' does not handle sub_commands.
    # Need to override hgbuild because it has a private copy of
    # build.sub_commands.
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
    # put dlls in sub directory so that they won't pollute PATH
    extra['zipfile'] = 'lib/library.zip'

    # We allow some configuration to be supplemented via environment
    # variables. This is better than setup.cfg files because it allows
    # supplementing configs instead of replacing them.
    extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
    if extrapackages:
        py2exepackages.extend(extrapackages.split(' '))

    extra_includes = os.environ.get('HG_PY2EXE_EXTRA_INCLUDES')
    if extra_includes:
        py2exe_includes.extend(extra_includes.split(' '))

    excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
    if excludes:
        py2exeexcludes.extend(excludes.split(' '))

    dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
    if dllexcludes:
        py2exedllexcludes.extend(dllexcludes.split(' '))
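    # Hypothetical example (package names illustrative only): running the
    # build with HG_PY2EXE_EXTRA_PACKAGES='keyring win32ctypes' appends both
    # entries to py2exepackages; the other three variables extend the include,
    # exclude, and DLL-exclude lists the same way.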

if os.environ.get('PYOXIDIZER'):
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))

if os.name == 'nt':
    # Windows binary file versions for exe/dll files must have the
    # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
    setupversion = setupversion.split(r'+', 1)[0]
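    # e.g. a hypothetical local version string '6.8+hg20.abc123' becomes
    # '6.8' here.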

setup(
    name='mercurial',
    version=setupversion,
    author='Olivia Mackall and many others',
    author_email='mercurial@mercurial-scm.org',
    url='https://mercurial-scm.org/',
    download_url='https://mercurial-scm.org/release/',
    description=(
        'Fast scalable distributed SCM (revision control, version '
        'control) system'
    ),
    long_description=(
        'Mercurial is a distributed SCM tool written in Python.'
        ' It is used by a number of large projects that require'
        ' fast, reliable distributed revision control, such as '
        'Mozilla.'
    ),
    license='GNU GPLv2 or any later version',
    classifiers=[
        'Development Status :: 6 - Mature',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Natural Language :: Danish',
        'Natural Language :: English',
        'Natural Language :: German',
        'Natural Language :: Italian',
        'Natural Language :: Japanese',
        'Natural Language :: Portuguese (Brazilian)',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: OS Independent',
        'Operating System :: POSIX',
        'Programming Language :: C',
        'Programming Language :: Python',
        'Topic :: Software Development :: Version Control',
    ],
    scripts=scripts,
    packages=packages,
    ext_modules=extmodules,
    data_files=datafiles,
    package_data=packagedata,
    cmdclass=cmdclass,
    distclass=hgdist,
    options={
        'py2exe': {
            'bundle_files': 3,
            'dll_excludes': py2exedllexcludes,
            'includes': py2exe_includes,
            'excludes': py2exeexcludes,
            'packages': py2exepackages,
        },
        'bdist_mpkg': {
            'zipdist': False,
            'license': 'COPYING',
            'readme': 'contrib/packaging/macosx/Readme.html',
            'welcome': 'contrib/packaging/macosx/Welcome.html',
        },
    },
    **extra,
)
@@ -1,23 +1,23
# A dummy extension that installs an hgweb command that throws an Exception.


from mercurial.hgweb import webcommands


def raiseerror(web):
    '''Dummy web command that raises an uncaught Exception.'''

    # Simulate an error after partial response.
    if b'partialresponse' in web.req.qsparams:
        web.res.status = b'200 Script output follows'
        web.res.headers[b'Content-Type'] = b'text/plain'
        web.res.setbodywillwrite()
        list(web.res.sendresponse())
        web.res.getbodyfile().write(b'partial content\n')

    raise AttributeError('I am an uncaught error!')


def extsetup(ui):
    setattr(webcommands, 'raiseerror', raiseerror)
    webcommands.__all__.append('raiseerror')
@@ -1,1879 +1,1884
This file used to contain all largefile tests.
Do not add any new tests in this file as it is already far too long to run.

It contains all the testing of the basic concepts of largefiles in a single block.

  $ USERCACHE="$TESTTMP/cache"; export USERCACHE
  $ mkdir "${USERCACHE}"
  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > largefiles=
  > purge=
  > rebase=
  > transplant=
  > [phases]
  > publish=False
  > [largefiles]
  > minsize=2
  > patterns=glob:**.dat
  > usercache=${USERCACHE}
  > [hooks]
  > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status"
  > EOF

Create the repo with a couple of revisions of both large and normal
files.
Test status and dirstate of largefiles and that summary output is correct.

  $ hg init a
  $ cd a
  $ mkdir sub
  $ echo normal1 > normal1
  $ echo normal2 > sub/normal2
  $ echo large1 > large1
  $ echo large2 > sub/large2
  $ hg add normal1 sub/normal2
  $ hg add --large large1 sub/large2
  $ hg commit -m "add files"
  Invoking status precommit hook
  A large1
  A normal1
  A sub/large2
  A sub/normal2
  $ touch large1 sub/large2
  $ sleep 1
  $ hg st
  $ hg debugstate --no-dates
  n 644 41 set .hglf/large1
  n 644 41 set .hglf/sub/large2
  n 644 8 set normal1
  n 644 8 set sub/normal2
  $ hg debugstate --large --no-dates
  n 644 7 set large1
  n 644 7 set sub/large2
  $ echo normal11 > normal1
  $ echo normal22 > sub/normal2
  $ echo large11 > large1
  $ echo large22 > sub/large2
  $ hg commit -m "edit files"
  Invoking status precommit hook
  M large1
  M normal1
  M sub/large2
  M sub/normal2
  $ hg sum --large
  parent: 1:ce8896473775 tip
   edit files
  branch: default
  commit: (clean)
  update: (current)
  phases: 2 draft
  largefiles: (no remote repo)

Commit preserved largefile contents.

  $ cat normal1
  normal11
  $ cat large1
  large11
  $ cat sub/normal2
  normal22
  $ cat sub/large2
  large22

Test status, subdir and unknown files

  $ echo unknown > sub/unknown
  $ hg st --all
  ? sub/unknown
  C large1
  C normal1
  C sub/large2
  C sub/normal2
  $ hg st --all sub
  ? sub/unknown
  C sub/large2
  C sub/normal2
  $ rm sub/unknown

Test messages and exit codes for remove warning cases

  $ hg remove -A large1
  not removing large1: file still exists
  [1]
  $ echo 'modified' > large1
  $ hg remove large1
  not removing large1: file is modified (use -f to force removal)
  [1]
  $ echo 'new' > normalnew
  $ hg add normalnew
  $ echo 'new' > largenew
  $ hg add --large normalnew
  normalnew already tracked!
  $ hg remove normalnew largenew
  not removing largenew: file is untracked
  not removing normalnew: file has been marked for add (use 'hg forget' to undo add)
  [1]
  $ rm normalnew largenew
  $ hg up -Cq

Remove both largefiles and normal files.

  $ hg remove normal1 large1
  $ hg status large1
  R large1
  $ hg commit -m "remove files"
  Invoking status precommit hook
  R large1
  R normal1
  $ ls -A
  .hg
  .hglf
  sub
  $ echo "testlargefile" > large1-test
  $ hg add --large large1-test
  $ hg st
  A large1-test
  $ hg rm large1-test
  not removing large1-test: file has been marked for add (use forget to undo)
  [1]
  $ hg st
  A large1-test
  $ hg forget large1-test
  $ hg st
  ? large1-test
  $ hg remove large1-test
  not removing large1-test: file is untracked
  [1]
  $ hg forget large1-test
  not removing large1-test: file is already untracked
  [1]
  $ rm large1-test

Copy both largefiles and normal files (testing that status output is correct).

  $ hg cp sub/normal2 normal1
  $ hg cp sub/large2 large1
  $ hg commit -m "copy files"
  Invoking status precommit hook
  A large1
  A normal1
  $ cat normal1
  normal22
  $ cat large1
  large22

Test moving largefiles and verify that normal files are also unaffected.

  $ hg mv normal1 normal3
  $ hg mv large1 large3
  $ hg mv sub/normal2 sub/normal4
  $ hg mv sub/large2 sub/large4
  $ hg commit -m "move files"
  Invoking status precommit hook
  A large3
  A normal3
  A sub/large4
  A sub/normal4
  R large1
  R normal1
  R sub/large2
  R sub/normal2
  $ cat normal3
  normal22
  $ cat large3
  large22
  $ cat sub/normal4
  normal22
  $ cat sub/large4
  large22


#if serve
Test display of largefiles in hgweb

  $ hg serve -d -p $HGPORT --pid-file ../hg.pid
  $ cat ../hg.pid >> $DAEMON_PIDS
  $ get-with-headers.py $LOCALIP:$HGPORT 'file/tip/?style=raw'
  200 Script output follows


  drwxr-xr-x sub
  -rw-r--r-- 41 large3
  -rw-r--r-- 9 normal3


  $ get-with-headers.py $LOCALIP:$HGPORT 'file/tip/sub/?style=raw'
  200 Script output follows


  -rw-r--r-- 41 large4
  -rw-r--r-- 9 normal4


  $ killdaemons.py
#endif

Test largefiles can be loaded in hgweb (wrapcommand() shouldn't fail)

  $ cat <<EOF > "$TESTTMP/hgweb.cgi"
  > #!$PYTHON
  > from mercurial import demandimport; demandimport.enable()
  > from mercurial.hgweb import hgweb
  > from mercurial.hgweb import wsgicgi
  > application = hgweb(b'.', b'test repo')
  > wsgicgi.launch(application)
  > EOF
  $ . "$TESTDIR/cgienv"

  $ SCRIPT_NAME='' \
  > "$PYTHON" "$TESTTMP/hgweb.cgi" > /dev/null

Test archiving the various revisions. These hit corner cases known with
archiving.

  $ hg archive -r 0 ../archive0
  $ hg archive -r 1 ../archive1
  $ hg archive -r 2 ../archive2
  $ hg archive -r 3 ../archive3
  $ hg archive -r 4 ../archive4
  $ cd ../archive0
  $ cat normal1
  normal1
  $ cat large1
  large1
  $ cat sub/normal2
  normal2
  $ cat sub/large2
  large2
  $ cd ../archive1
  $ cat normal1
  normal11
  $ cat large1
  large11
  $ cat sub/normal2
  normal22
  $ cat sub/large2
  large22
  $ cd ../archive2
  $ ls -A
  .hg_archival.txt
  sub
  $ cat sub/normal2
  normal22
  $ cat sub/large2
  large22
  $ cd ../archive3
  $ cat normal1
  normal22
  $ cat large1
  large22
  $ cat sub/normal2
  normal22
  $ cat sub/large2
  large22
  $ cd ../archive4
  $ cat normal3
  normal22
  $ cat large3
  large22
  $ cat sub/normal4
  normal22
  $ cat sub/large4
  large22

Commit corner case: specify files to commit.

  $ cd ../a
  $ echo normal3 > normal3
  $ echo large3 > large3
  $ echo normal4 > sub/normal4
  $ echo large4 > sub/large4
  $ hg commit normal3 large3 sub/normal4 sub/large4 -m "edit files again"
  Invoking status precommit hook
  M large3
  M normal3
  M sub/large4
  M sub/normal4
  $ cat normal3
  normal3
  $ cat large3
  large3
  $ cat sub/normal4
  normal4
  $ cat sub/large4
  large4

One more commit corner case: commit from a subdirectory.

  $ cd ../a
  $ echo normal33 > normal3
  $ echo large33 > large3
  $ echo normal44 > sub/normal4
  $ echo large44 > sub/large4
  $ cd sub
  $ hg commit -m "edit files yet again"
  Invoking status precommit hook
  M large3
  M normal3
  M sub/large4
  M sub/normal4
  $ cat ../normal3
  normal33
  $ cat ../large3
  large33
  $ cat normal4
  normal44
  $ cat large4
  large44

Committing standins is not allowed.

  $ cd ..
  $ echo large3 > large3
  $ hg commit .hglf/large3 -m "try to commit standin"
  abort: file ".hglf/large3" is a largefile standin
  (commit the largefile itself instead)
  [255]

Corner cases for adding largefiles.

  $ echo large5 > large5
  $ hg add --large large5
  $ hg add --large large5
  large5 already a largefile
  $ mkdir sub2
  $ echo large6 > sub2/large6
  $ echo large7 > sub2/large7
  $ hg add --large sub2
  adding sub2/large6 as a largefile
  adding sub2/large7 as a largefile
  $ hg st
  M large3
  A large5
  A sub2/large6
  A sub2/large7

Committing directories containing only largefiles.

  $ mkdir -p z/y/x/m
  $ touch z/y/x/m/large1
  $ touch z/y/x/large2
  $ hg add --large z/y/x/m/large1 z/y/x/large2
  $ hg commit -m "Subdir with directory only containing largefiles" z
  Invoking status precommit hook
  M large3
  A large5
  A sub2/large6
  A sub2/large7
  A z/y/x/large2
  A z/y/x/m/large1

(and a bit of log testing)

  $ hg log -T '{rev}\n' z/y/x/m/large1
  7
  $ hg log -T '{rev}\n' z/y/x/m # with only a largefile
  7

  $ hg rollback --quiet
  $ touch z/y/x/m/normal
  $ hg add z/y/x/m/normal
  $ hg commit -m "Subdir with mixed contents" z
  Invoking status precommit hook
  M large3
  A large5
  A sub2/large6
  A sub2/large7
  A z/y/x/large2
  A z/y/x/m/large1
  A z/y/x/m/normal
  $ hg st
  M large3
  A large5
  A sub2/large6
  A sub2/large7
  $ hg rollback --quiet
  $ hg revert z/y/x/large2 z/y/x/m/large1
  $ rm z/y/x/large2 z/y/x/m/large1
  $ hg commit -m "Subdir with normal contents" z
  Invoking status precommit hook
  M large3
  A large5
  A sub2/large6
  A sub2/large7
  A z/y/x/m/normal
  $ hg st
  M large3
  A large5
  A sub2/large6
  A sub2/large7
  $ hg rollback --quiet
  $ hg revert --quiet z
  $ hg commit -m "Empty subdir" z
  abort: z: no match under directory!
  [10]
  $ rm -rf z
  $ hg ci -m "standin" .hglf
  abort: file ".hglf" is a largefile standin
  (commit the largefile itself instead)
  [255]

Test "hg status" with combination of 'file pattern' and 'directory
pattern' for largefiles:

  $ hg status sub2/large6 sub2
  A sub2/large6
  A sub2/large7

Config settings (pattern **.dat, minsize 2 MB) are respected.

  $ echo testdata > test.dat
  $ dd bs=1k count=2k if=/dev/zero of=reallylarge > /dev/null 2> /dev/null
  $ hg add
  adding reallylarge as a largefile
  adding test.dat as a largefile

Test that minsize and --lfsize handle float values;
also tests that --lfsize overrides largefiles.minsize.
(0.250 MB = 256 kB = 262144 B)
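(Likewise, for the --lfsize=.125 runs below: 0.125 MB = 128 kB = 131072 B, so
the 128 kB file qualifies as a largefile while the 127 kB one does not.)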
440
440
441 $ dd if=/dev/zero of=ratherlarge bs=1024 count=256 > /dev/null 2> /dev/null
441 $ dd if=/dev/zero of=ratherlarge bs=1024 count=256 > /dev/null 2> /dev/null
442 $ dd if=/dev/zero of=medium bs=1024 count=128 > /dev/null 2> /dev/null
442 $ dd if=/dev/zero of=medium bs=1024 count=128 > /dev/null 2> /dev/null
443 $ hg --config largefiles.minsize=.25 add
443 $ hg --config largefiles.minsize=.25 add
444 adding ratherlarge as a largefile
444 adding ratherlarge as a largefile
445 adding medium
445 adding medium
446 $ hg forget medium
446 $ hg forget medium
447 $ hg --config largefiles.minsize=.25 add --lfsize=.125
447 $ hg --config largefiles.minsize=.25 add --lfsize=.125
448 adding medium as a largefile
448 adding medium as a largefile
449 $ dd if=/dev/zero of=notlarge bs=1024 count=127 > /dev/null 2> /dev/null
449 $ dd if=/dev/zero of=notlarge bs=1024 count=127 > /dev/null 2> /dev/null
450 $ hg --config largefiles.minsize=.25 add --lfsize=.125
450 $ hg --config largefiles.minsize=.25 add --lfsize=.125
451 adding notlarge
451 adding notlarge
452 $ hg forget notlarge
452 $ hg forget notlarge

Test forget on largefiles.

  $ hg forget large3 large5 test.dat reallylarge ratherlarge medium
  $ hg commit -m "add/edit more largefiles"
  Invoking status precommit hook
  A sub2/large6
  A sub2/large7
  R large3
  ? large5
  ? medium
  ? notlarge
  ? ratherlarge
  ? reallylarge
  ? test.dat
  $ hg st
  ? large3
  ? large5
  ? medium
  ? notlarge
  ? ratherlarge
  ? reallylarge
  ? test.dat

Purge with largefiles: verify that largefiles are still in the working
dir after a purge.

  $ hg purge --all
  $ cat sub/large4
  large44
  $ cat sub2/large6
  large6
  $ cat sub2/large7
  large7

Test addremove: verify that files that should be added as largefiles are added as
such and that already-existing largefiles are not added as normal files by
accident.

  $ rm normal3
  $ rm sub/large4
  $ echo "testing addremove with patterns" > testaddremove.dat
  $ echo "normaladdremove" > normaladdremove
  $ hg addremove
  removing sub/large4
  adding testaddremove.dat as a largefile
  removing normal3
  adding normaladdremove

Test addremove with -R

  $ hg up -C
  getting changed largefiles
  1 largefiles updated, 0 removed
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ rm normal3
  $ rm sub/large4
  $ echo "testing addremove with patterns" > testaddremove.dat
  $ echo "normaladdremove" > normaladdremove
  $ cd ..
  $ hg -R a -v addremove
  removing sub/large4
  adding testaddremove.dat as a largefile
  removing normal3
  adding normaladdremove
  $ cd a

Test issue 3364
  $ hg clone . ../addrm
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd ../addrm
  $ cat >> .hg/hgrc <<EOF
  > [hooks]
  > post-commit.stat=sh -c "echo \\"Invoking status postcommit hook\\"; hg status -A"
  > EOF
  $ touch foo
  $ hg add --large foo
  $ hg ci -m "add foo"
  Invoking status precommit hook
  A foo
  Invoking status postcommit hook
  C foo
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ rm foo
  $ hg st
  ! foo
hmm... no precommit hook invoked, but there is a postcommit hook??
  $ hg ci -m "will not checkin"
  nothing changed (1 missing files, see 'hg status')
  Invoking status postcommit hook
  ! foo
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  [1]
  $ hg addremove
  removing foo
  $ hg st
  R foo
  $ hg ci -m "used to say nothing changed"
  Invoking status precommit hook
  R foo
  Invoking status postcommit hook
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ hg st

Test issue 3507 (both normal files and largefiles were a problem)

  $ touch normal
  $ touch large
  $ hg add normal
  $ hg add --large large
  $ hg ci -m "added"
  Invoking status precommit hook
  A large
  A normal
  Invoking status postcommit hook
  C large
  C normal
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ hg remove normal
  $ hg addremove --traceback
  $ hg ci -m "addremoved normal"
  Invoking status precommit hook
  R normal
  Invoking status postcommit hook
  C large
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ hg up -C '.^'
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg remove large
  $ hg addremove --traceback
  $ hg ci -m "removed large"
  Invoking status precommit hook
  R large
  created new head
  Invoking status postcommit hook
  C normal
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7

Test commit -A (issue3542)
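commit -A implies addremove: new files are added and missing files removed
as part of the commit.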
  $ echo large8 > large8
  $ hg add --large large8
  $ hg ci -Am 'this used to add large8 as normal and commit both'
  Invoking status precommit hook
  A large8
  Invoking status postcommit hook
  C large8
  C normal
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ rm large8
  $ hg ci -Am 'this used to not notice the rm'
  removing large8
  Invoking status precommit hook
  R large8
  Invoking status postcommit hook
  C normal
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7

Test that a standin can't be added as a large file

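A standin is the small file tracked under .hglf/ in place of each largefile;
it holds the SHA-1 hash of the largefile's content, while the content itself
lives in the largefiles store.
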
  $ touch large
  $ hg add --large large
  $ hg ci -m "add"
  Invoking status precommit hook
  A large
  Invoking status postcommit hook
  C large
  C normal
  C normal3
  C sub/large4
  C sub/normal4
  C sub2/large6
  C sub2/large7
  $ hg remove large
  $ touch large
  $ hg addremove --config largefiles.patterns=**large --traceback
  adding large as a largefile
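largefiles.patterns lists file patterns that are always added as largefiles
regardless of size, which is why the empty file above still becomes one.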

Test that outgoing --large works (with revsets too)
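With --large, outgoing also lists the largefiles that a push would have to
upload; they show up at the end of the output.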
  $ hg outgoing --rev '.^' --large
  comparing with $TESTTMP/a
  searching for changes
  changeset: 8:c02fd3b77ec4
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add foo

  changeset: 9:289dd08c9bbb
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: used to say nothing changed

  changeset: 10:34f23ac6ac12
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: added

  changeset: 12:710c1b2f523c
  parent: 10:34f23ac6ac12
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: removed large

  changeset: 13:0a3e75774479
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: this used to add large8 as normal and commit both

  changeset: 14:84f3d378175c
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: this used to not notice the rm

  largefiles to upload (1 entities):
  large8

  $ cd ../a

Clone a largefiles repo.

  $ hg clone . ../b
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd ../b
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  7:daea875e9014 add/edit more largefiles
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ cat normal3
  normal33

Test graph log

  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n'
  @ 7:daea875e9014 add/edit more largefiles
  |
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  |
  o 3:9e8fbc4bce62 copy files
  |
  o 2:51a0ae4d5864 remove files
  |
  o 1:ce8896473775 edit files
  |
  o 0:30d30fe6a5be add files


Test log with --patch

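Note that for largefiles, --patch shows diffs of the standins under .hglf/,
one 40-character hash replacing another, rather than the largefile contents
themselves.
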
  $ hg log --patch -r 6::7
  changeset: 6:4355d653f84f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files yet again

  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/large3
  --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -baaf12afde9d8d67f25dab6dced0d2bf77dba47c
  +7838695e10da2bb75ac1156565f40a2595fa2fa0
  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4
  --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -aeb2210d19f02886dde00dac279729a48471e2f9
  +971fb41e78fea4f8e0ba5244784239371cb00591
  diff -r 9d5af5072dbd -r 4355d653f84f normal3
  --- a/normal3 Thu Jan 01 00:00:00 1970 +0000
  +++ b/normal3 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -normal3
  +normal33
  diff -r 9d5af5072dbd -r 4355d653f84f sub/normal4
  --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -normal4
  +normal44

  changeset: 7:daea875e9014
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add/edit more largefiles

  diff -r 4355d653f84f -r daea875e9014 .hglf/large3
  --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +0,0 @@
  -7838695e10da2bb75ac1156565f40a2595fa2fa0
  diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large6
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub2/large6 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30
  diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large7
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub2/large7 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +bb3151689acb10f0c3125c560d5e63df914bc1af


  $ hg log --patch -r 6::7 sub/
  changeset: 6:4355d653f84f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files yet again

  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4
  --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -aeb2210d19f02886dde00dac279729a48471e2f9
  +971fb41e78fea4f8e0ba5244784239371cb00591
  diff -r 9d5af5072dbd -r 4355d653f84f sub/normal4
  --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -normal4
  +normal44


log with both --follow and --patch

  $ hg log --follow --patch --limit 2
  changeset: 7:daea875e9014
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add/edit more largefiles

  diff -r 4355d653f84f -r daea875e9014 .hglf/large3
  --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +0,0 @@
  -7838695e10da2bb75ac1156565f40a2595fa2fa0
  diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large6
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub2/large6 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30
  diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large7
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub2/large7 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +bb3151689acb10f0c3125c560d5e63df914bc1af

  changeset: 6:4355d653f84f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files yet again

  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/large3
  --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -baaf12afde9d8d67f25dab6dced0d2bf77dba47c
  +7838695e10da2bb75ac1156565f40a2595fa2fa0
  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4
  --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -aeb2210d19f02886dde00dac279729a48471e2f9
  +971fb41e78fea4f8e0ba5244784239371cb00591
  diff -r 9d5af5072dbd -r 4355d653f84f normal3
  --- a/normal3 Thu Jan 01 00:00:00 1970 +0000
  +++ b/normal3 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -normal3
  +normal33
  diff -r 9d5af5072dbd -r 4355d653f84f sub/normal4
  --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -normal4
  +normal44

  $ hg log --follow --patch sub/large4
  changeset: 6:4355d653f84f
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files yet again

  diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4
  --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -aeb2210d19f02886dde00dac279729a48471e2f9
  +971fb41e78fea4f8e0ba5244784239371cb00591

  changeset: 5:9d5af5072dbd
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files again

  diff -r 74c02385b94c -r 9d5af5072dbd .hglf/sub/large4
  --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  +aeb2210d19f02886dde00dac279729a48471e2f9

  changeset: 4:74c02385b94c
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: move files

  diff -r 9e8fbc4bce62 -r 74c02385b94c .hglf/sub/large4
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +eb7338044dc27f9bc59b8dd5a246b065ead7a9c4

  changeset: 1:ce8896473775
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: edit files

  diff -r 30d30fe6a5be -r ce8896473775 .hglf/sub/large2
  --- a/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000
  @@ -1,1 +1,1 @@
  -1deebade43c8c498a3c8daddac0244dc55d1331d
  +eb7338044dc27f9bc59b8dd5a246b065ead7a9c4

  changeset: 0:30d30fe6a5be
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: add files

  diff -r 000000000000 -r 30d30fe6a5be .hglf/sub/large2
  --- /dev/null Thu Jan 01 00:00:00 1970 +0000
  +++ b/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000
  @@ -0,0 +1,1 @@
  +1deebade43c8c498a3c8daddac0244dc55d1331d

  $ cat sub/normal4
  normal44
  $ cat sub/large4
  large44
  $ cat sub2/large6
  large6
  $ cat sub2/large7
  large7
  $ hg log -qf sub2/large7
  7:daea875e9014
  $ hg log -Gqf sub2/large7
  @ 7:daea875e9014
  |
  ~
  $ cd ..

Test log from outside repo

  $ hg log b/sub -T '{rev}:{node|short} {desc|firstline}\n'
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files

Test clone at revision

  $ hg clone a -r 3 c
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 10 changes to 4 files
  new changesets 30d30fe6a5be:9e8fbc4bce62 (4 drafts)
  updating to branch default
  getting changed largefiles
  2 largefiles updated, 0 removed
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd c
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ cat normal1
  normal22
  $ cat large1
  large22
  $ cat sub/normal2
  normal22
  $ cat sub/large2
  large22

Old revisions of a clone have correct largefiles content (this also
tests update).

  $ hg update -r 1
  getting changed largefiles
  1 largefiles updated, 0 removed
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cat large1
  large11
  $ cat sub/large2
  large22
  $ cd ..

Test cloning with --all-largefiles flag

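--all-largefiles additionally fills the user cache with every largefile
version referenced anywhere in history, not just the versions needed for the
working copy; that is what the "additional largefiles cached" lines count.
USERCACHE points at that cache, so clearing it forces the largefiles to be
fetched again.
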
  $ rm "${USERCACHE}"/*
  $ hg clone --all-largefiles a a-backup
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  7 additional largefiles cached

  $ rm "${USERCACHE}"/*
  $ hg clone --all-largefiles -u 0 a a-clone0
  updating to branch default
  getting changed largefiles
  2 largefiles updated, 0 removed
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  8 additional largefiles cached
  $ hg -R a-clone0 sum
  parent: 0:30d30fe6a5be
   add files
  branch: default
  commit: (clean)
  update: 7 new changesets (update)
  phases: 8 draft

  $ rm "${USERCACHE}"/*
  $ hg clone --all-largefiles -u 1 a a-clone1
  updating to branch default
  getting changed largefiles
  2 largefiles updated, 0 removed
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  8 additional largefiles cached
  $ hg -R a-clone1 verify --large --lfa --lfc -q
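--large makes verify check largefiles too, --lfa checks them in all
revisions rather than only the current one, and --lfc also verifies their
contents instead of just the recorded hashes.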
  $ hg -R a-clone1 sum
  parent: 1:ce8896473775
   edit files
  branch: default
  commit: (clean)
  update: 6 new changesets (update)
  phases: 8 draft

  $ rm "${USERCACHE}"/*
  $ hg clone --all-largefiles -U a a-clone-u
  10 additional largefiles cached
  $ hg -R a-clone-u sum
  parent: -1:000000000000 (no revision checked out)
  branch: default
  commit: (clean)
  update: 8 new changesets (update)
  phases: 8 draft

Show computed destination directory:

  $ mkdir xyz
  $ cd xyz
  $ hg clone ../a
  destination directory: a
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd ..

Clone URL without path:

  $ hg clone file://
  abort: repository / not found
  [255]

Ensure base clone command argument validation

  $ hg clone -U -u 0 a a-clone-failure
  abort: cannot specify both --noupdate and --updaterev
  [10]

  $ hg clone --all-largefiles a ssh://localhost/a
  abort: --all-largefiles is incompatible with non-local destination ssh://localhost/a
  [255]

  $ touch existing_destination
  $ hg clone --all-largefiles a existing_destination
  abort: destination 'existing_destination' already exists
  [10]

Test pulling with --all-largefiles flag. Also test that the largefiles are
downloaded from 'default' instead of 'default-push' when no source is specified
(issue3584)

  $ rm -Rf a-backup
  $ hg clone -r 1 a a-backup
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 8 changes to 4 files
  new changesets 30d30fe6a5be:ce8896473775 (2 drafts)
  updating to branch default
  getting changed largefiles
  2 largefiles updated, 0 removed
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ rm "${USERCACHE}"/*
  $ cd a-backup
  $ hg pull --all-largefiles --config paths.default-push=bogus/path
  pulling from $TESTTMP/a
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 6 changesets with 16 changes to 8 files
  new changesets 51a0ae4d5864:daea875e9014 (6 drafts)
  (run 'hg update' to get a working copy)
  6 largefiles cached

redo pull with --lfrev and check it pulls largefiles for the right revs

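pulled() is a revset provided by largefiles that expands to the revisions
added by the pull being processed, so the expression below selects the head
and the oldest revision of the pulled range, revisions 7 and 2.
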
  $ hg rollback
  repository tip rolled back to revision 1 (undo pull)
  $ hg pull -v --lfrev 'heads(pulled())+min(pulled())'
  pulling from $TESTTMP/a
  searching for changes
  all local changesets known remotely
  6 changesets found
  uncompressed size of bundle content:
  1401 (changelog)
  1710 (manifests)
  256 .hglf/large1
  570 .hglf/large3
  578 .hglf/sub/large4
  184 .hglf/sub2/large6
  184 .hglf/sub2/large7
  214 normal1
  463 normal3
  471 sub/normal4
  adding changesets
  adding manifests
  adding file changes
  added 6 changesets with 16 changes to 8 files
  new changesets 51a0ae4d5864:daea875e9014 (6 drafts)
  calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
  (run 'hg update' to get a working copy)
  pulling largefiles for revision 7
  found 971fb41e78fea4f8e0ba5244784239371cb00591 in store
  found 0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30 in store
  found bb3151689acb10f0c3125c560d5e63df914bc1af in store
  pulling largefiles for revision 2
  found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store
  0 largefiles cached

lfpull

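lfpull downloads the largefiles needed by the given revisions into the local
store and cache without touching the working copy; '-r :' requests all
revisions.
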
  $ hg lfpull -r : --config largefiles.usercache=usercache-lfpull
  2 largefiles cached
  $ hg lfpull -v -r 4+2 --config largefiles.usercache=usercache-lfpull
  pulling largefiles for revision 4
  found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store
  found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store
  pulling largefiles for revision 2
  found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store
  0 largefiles cached

  $ ls usercache-lfpull/* | sort
  usercache-lfpull/1deebade43c8c498a3c8daddac0244dc55d1331d
  usercache-lfpull/4669e532d5b2c093a78eca010077e708a071bb64

  $ cd ..

Rebasing between two repositories does not revert largefiles to old
revisions (this was a very bad bug that took a lot of work to fix).

  $ hg clone a d
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd b
  $ echo large4-modified > sub/large4
  $ echo normal3-modified > normal3
  $ hg commit -m "modify normal file and largefile in repo b"
  Invoking status precommit hook
  M normal3
  M sub/large4
  $ cd ../d
  $ echo large6-modified > sub2/large6
  $ echo normal4-modified > sub/normal4
  $ hg commit -m "modify normal file largefile in repo d"
  Invoking status precommit hook
  M sub/normal4
  M sub2/large6
  $ cd ..
  $ hg clone d e
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd d

More rebase testing, but also test that the largefiles are downloaded from
'default-push' when no source is specified (issue3584). (The largefile from
the pulled revision is, however, not downloaded but found in the local
cache.) Largefiles are fetched only for the newly pulled revision, not for
existing revisions, rebased or not.

  $ [ ! -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
  $ hg pull --rebase --all-largefiles --config paths.default-push=bogus/path --config paths.default=../b
  pulling from $TESTTMP/b
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 2 changes to 2 files (+1 heads)
  new changesets a381d2c8c80e (1 drafts)
  0 largefiles cached
  rebasing 8:f574fb32bb45 "modify normal file largefile in repo d"
  Invoking status precommit hook
  M sub/normal4
  M sub2/large6
  saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg
  $ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  7:daea875e9014 add/edit more largefiles
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n'
  @ 9:598410d3eb9a modify normal file largefile in repo d
  |
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  |
  o 7:daea875e9014 add/edit more largefiles
  |
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  |
  o 3:9e8fbc4bce62 copy files
  |
  o 2:51a0ae4d5864 remove files
  |
  o 1:ce8896473775 edit files
  |
  o 0:30d30fe6a5be add files

  $ cat normal3
  normal3-modified
  $ cat sub/normal4
  normal4-modified
  $ cat sub/large4
  large4-modified
  $ cat sub2/large6
  large6-modified
  $ cat sub2/large7
  large7
  $ cd ../e
  $ hg pull ../b
  pulling from ../b
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 2 changes to 2 files (+1 heads)
  new changesets a381d2c8c80e (1 drafts)
  (run 'hg heads' to see heads, 'hg merge' to merge)
  $ hg rebase
  rebasing 8:f574fb32bb45 "modify normal file largefile in repo d"
  Invoking status precommit hook
  M sub/normal4
  M sub2/large6
  saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  7:daea875e9014 add/edit more largefiles
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ cat normal3
  normal3-modified
  $ cat sub/normal4
  normal4-modified
  $ cat sub/large4
  large4-modified
  $ cat sub2/large6
  large6-modified
  $ cat sub2/large7
  large7

Log on largefiles

- same output
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4
  8:a381d2c8c80e modify normal file and largefile in repo b
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  :
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  |
  ~
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n' sub/large4
  8:a381d2c8c80e modify normal file and largefile in repo b
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  :
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  |
  ~

- .hglf only matches largefiles; without the .hglf prefix it also matches
  revision 9 because of sub/normal4
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub
  8:a381d2c8c80e modify normal file and largefile in repo b
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  :
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  :
  o 1:ce8896473775 edit files
  |
  o 0:30d30fe6a5be add files

  $ hg log --template '{rev}:{node|short} {desc|firstline}\n' sub
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' sub
  @ 9:598410d3eb9a modify normal file largefile in repo d
  |
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  :
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  :
  o 1:ce8896473775 edit files
  |
  o 0:30d30fe6a5be add files

- globbing gives same result
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 'glob:sub/*'
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' 'glob:sub/*'
  @ 9:598410d3eb9a modify normal file largefile in repo d
  |
  o 8:a381d2c8c80e modify normal file and largefile in repo b
  :
  o 6:4355d653f84f edit files yet again
  |
  o 5:9d5af5072dbd edit files again
  |
  o 4:74c02385b94c move files
  :
  o 1:ce8896473775 edit files
  |
  o 0:30d30fe6a5be add files

Rollback on largefiles.

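rollback undoes the commit itself but leaves the working copy alone, so
sub/large4 keeps its new content and shows up as modified again.
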
  $ echo large4-modified-again > sub/large4
  $ hg commit -m "Modify large4 again"
  Invoking status precommit hook
  M sub/large4
  $ hg rollback
  repository tip rolled back to revision 9 (undo commit)
  working directory now based on revision 9
  $ hg st
  M sub/large4
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  7:daea875e9014 add/edit more largefiles
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ cat sub/large4
  large4-modified-again

"update --check" refuses to update with uncommitted changes.
  $ hg update --check 8
  abort: uncommitted changes
  [255]

1424 "update --clean" leaves correct largefiles in working copy, even when there is
1429 "update --clean" leaves correct largefiles in working copy, even when there is
1425 .orig files from revert in .hglf.
1430 .orig files from revert in .hglf.
1426
1431
1427 $ echo mistake > sub2/large7
1432 $ echo mistake > sub2/large7
1428 $ hg revert sub2/large7
1433 $ hg revert sub2/large7
1429 $ cat sub2/large7
1434 $ cat sub2/large7
1430 large7
1435 large7
1431 $ cat sub2/large7.orig
1436 $ cat sub2/large7.orig
1432 mistake
1437 mistake
1433 $ test ! -f .hglf/sub2/large7.orig
1438 $ test ! -f .hglf/sub2/large7.orig
1434
1439
1435 $ hg -q update --clean -r null
1440 $ hg -q update --clean -r null
1436 $ hg update --clean
1441 $ hg update --clean
1437 getting changed largefiles
1442 getting changed largefiles
1438 3 largefiles updated, 0 removed
1443 3 largefiles updated, 0 removed
1439 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
1444 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
1440 $ cat normal3
1445 $ cat normal3
1441 normal3-modified
1446 normal3-modified
1442 $ cat sub/normal4
1447 $ cat sub/normal4
1443 normal4-modified
1448 normal4-modified
1444 $ cat sub/large4
1449 $ cat sub/large4
1445 large4-modified
1450 large4-modified
1446 $ cat sub2/large6
1451 $ cat sub2/large6
1447 large6-modified
1452 large6-modified
1448 $ cat sub2/large7
1453 $ cat sub2/large7
1449 large7
1454 large7
1450 $ cat sub2/large7.orig
1455 $ cat sub2/large7.orig
1451 mistake
1456 mistake
1452 $ test ! -f .hglf/sub2/large7.orig
1457 $ test ! -f .hglf/sub2/large7.orig
1453
1458
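# Note: "hg revert" normally saves the replaced content next to the file
# as a .orig backup (for a largefile only in the working copy, never in
# .hglf/, as checked above). The backup can be suppressed with
# --no-backup, as later tests do; a minimal sketch, not executed here:
#   hg revert --no-backup sub2/large7
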
Verify that the largefile .orig file is no longer overwritten on every
update -C:
  $ hg update --clean
  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cat sub2/large7.orig
  mistake
  $ rm sub2/large7.orig

Now "update --check" is happy.
  $ hg update --check 8
  getting changed largefiles
  1 largefiles updated, 0 removed
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg update --check
  getting changed largefiles
  1 largefiles updated, 0 removed
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved

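# For reference: plain "hg update" can be made to behave like
# "update --check" through configuration; a sketch, not executed here
# and assuming a default hg setup:
#   [commands]
#   update.check = abort
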
Test removing empty largefiles directories on update
  $ test -d sub2 && echo "sub2 exists"
  sub2 exists
  $ hg update -q null
  $ test -d sub2 && echo "error: sub2 should not exist anymore"
  [1]
  $ hg update -q

Test hg remove removes empty largefiles directories
  $ test -d sub2 && echo "sub2 exists"
  sub2 exists
  $ hg remove sub2/*
  $ test -d sub2 && echo "error: sub2 should not exist anymore"
  [1]
  $ hg revert sub2/large6 sub2/large7

"revert" works on largefiles (and normal files too).
  $ echo hack3 >> normal3
  $ echo hack4 >> sub/normal4
  $ echo hack4 >> sub/large4
  $ rm sub2/large6
  $ hg revert sub2/large6
  $ hg rm sub2/large6
  $ echo new >> sub2/large8
  $ hg add --large sub2/large8
# XXX we don't really want to report that we're reverting the standin;
# that's just an implementation detail. But I don't see an obvious fix. ;-(
  $ hg revert sub
  reverting .hglf/sub/large4
  reverting sub/normal4
  $ hg status
  M normal3
  A sub2/large8
  R sub2/large6
  ? sub/large4.orig
  ? sub/normal4.orig
  $ cat sub/normal4
  normal4-modified
  $ cat sub/large4
  large4-modified
  $ hg revert -a --no-backup
  forgetting .hglf/sub2/large8
  reverting normal3
  undeleting .hglf/sub2/large6
  $ hg status
  ? sub/large4.orig
  ? sub/normal4.orig
  ? sub2/large8
  $ cat normal3
  normal3-modified
  $ cat sub2/large6
  large6-modified
  $ rm sub/*.orig sub2/large8

revert some files to an older revision
  $ hg revert --no-backup -r 8 sub2
  reverting .hglf/sub2/large6
  $ cat sub2/large6
  large6
  $ hg revert --no-backup -C -r '.^' sub2
  $ hg revert --no-backup sub2
  reverting .hglf/sub2/large6
  $ hg status

"verify --large" actually verifies largefiles

- Where Do We Come From? What Are We? Where Are We Going?
  $ pwd
  $TESTTMP/e
  $ hg paths
  default = $TESTTMP/d

  $ hg verify --large
  checking changesets
  checking manifests
  crosschecking files in changesets and manifests
  checking files
  checking dirstate
  checked 10 changesets with 28 changes to 10 files
  searching 1 changesets for largefiles
  verified existence of 3 revisions of 3 largefiles

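# For reference, the verify flags added by largefiles: --large checks
# the largefiles referenced by the verified revisions, --lfa extends
# the check to all revisions, and --lfc verifies blob contents rather
# than mere existence. Combined, a sketch (not executed here):
#   hg verify --large --lfa --lfc
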
- introduce missing blob in the local store and the remote store
and make sure that this is caught:

  $ mv $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 .
  $ rm .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
  $ hg verify --large -q
  changeset 9:598410d3eb9a: sub/large4 references missing $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
  [1]

- introduce corruption and make sure that it is caught when checking content:
  $ echo '5 cents' > $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
  $ hg verify -q --large --lfc
  changeset 9:598410d3eb9a: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
  [1]

- cleanup
  $ cp e166e74c7303192238d60af5a9c4ce9bef0b7928 $TESTTMP/d/.hg/largefiles/
  $ mv e166e74c7303192238d60af5a9c4ce9bef0b7928 .hg/largefiles/

- verifying all revisions will fail because we didn't clone all largefiles to d:
  $ echo 'T-shirt' > $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  $ hg verify -q --lfa --lfc
  changeset 0:30d30fe6a5be: large1 references missing $TESTTMP/d/.hg/largefiles/4669e532d5b2c093a78eca010077e708a071bb64
  changeset 0:30d30fe6a5be: sub/large2 references missing $TESTTMP/d/.hg/largefiles/1deebade43c8c498a3c8daddac0244dc55d1331d
  changeset 1:ce8896473775: large1 references missing $TESTTMP/d/.hg/largefiles/5f78770c0e77ba4287ad6ef3071c9bf9c379742f
  changeset 1:ce8896473775: sub/large2 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  changeset 3:9e8fbc4bce62: large1 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  changeset 4:74c02385b94c: large3 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  changeset 4:74c02385b94c: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  changeset 5:9d5af5072dbd: large3 references missing $TESTTMP/d/.hg/largefiles/baaf12afde9d8d67f25dab6dced0d2bf77dba47c
  changeset 5:9d5af5072dbd: sub/large4 references missing $TESTTMP/d/.hg/largefiles/aeb2210d19f02886dde00dac279729a48471e2f9
  changeset 6:4355d653f84f: large3 references missing $TESTTMP/d/.hg/largefiles/7838695e10da2bb75ac1156565f40a2595fa2fa0
  [1]

- cleanup
  $ rm $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
  $ rm -f .hglf/sub/*.orig

Update to revision with missing largefile - and make sure it really is missing

  $ rm ${USERCACHE}/7838695e10da2bb75ac1156565f40a2595fa2fa0
  $ hg up -r 6
  getting changed largefiles
  large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
  1 largefiles updated, 2 removed
  4 files updated, 0 files merged, 2 files removed, 0 files unresolved
  $ rm normal3
  $ echo >> sub/normal4
  $ hg ci -m 'commit with missing files'
  Invoking status precommit hook
  M sub/normal4
  ! large3
  ! normal3
  created new head
  $ hg st
  ! large3
  ! normal3
  $ hg up -r.
  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg st
  ! large3
  ! normal3
  $ hg up -Cr.
  getting changed largefiles
  large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
  0 largefiles updated, 0 removed
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg st
  ! large3
  $ hg rollback
  repository tip rolled back to revision 9 (undo commit)
  working directory now based on revision 6

Merge with revision with missing largefile - and make sure it tries to fetch it.

  $ hg up -Cqr null
  $ echo f > f
  $ hg ci -Am branch
  adding f
  Invoking status precommit hook
  A f
  created new head
  $ hg merge -r 6
  getting changed largefiles
  large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob)
  1 largefiles updated, 0 removed
  4 files updated, 0 files merged, 0 files removed, 0 files unresolved
  (branch merge, don't forget to commit)

  $ hg rollback -q
  $ hg up -Cq

Pulling 0 revisions with --all-largefiles should not fetch for all revisions

  $ hg pull --all-largefiles
  pulling from $TESTTMP/d
  searching for changes
  no changes found

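# For reference: clone accepts the same flag, so a local store holding
# every largefile revision can be populated up front; a sketch with a
# hypothetical clone target, not executed here:
#   hg clone --all-largefiles $TESTTMP/d d-full
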
Check that merging does not revert to old versions of largefiles, and
that merging after having pulled from a non-default remote works
correctly.

  $ cd ..
  $ hg clone -r 7 e temp
  adding changesets
  adding manifests
  adding file changes
  added 8 changesets with 24 changes to 10 files
  new changesets 30d30fe6a5be:daea875e9014 (8 drafts)
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg clone temp f
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
# Delete the largefiles in the largefiles system cache so that we have an
# opportunity to test that caching after a pull works.
  $ rm "${USERCACHE}"/*
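# For reference: ${USERCACHE} is the per-user largefiles store; its
# location can be overridden in an hgrc (a sketch with a placeholder
# path, not executed here):
#   [largefiles]
#   usercache = /path/to/cache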
  $ cd f
  $ echo "large4-merge-test" > sub/large4
  $ hg commit -m "Modify large4 to test merge"
  Invoking status precommit hook
  M sub/large4
# Test --lfrev (the successor of the old --cache-largefiles flag)
  $ hg pull --lfrev 'heads(pulled())' ../e
  pulling from ../e
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 4 changes to 4 files (+1 heads)
  new changesets a381d2c8c80e:598410d3eb9a (2 drafts)
  (run 'hg heads' to see heads, 'hg merge' to merge)
  2 largefiles cached
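# For reference: --lfrev takes a revset evaluated after the pull, and
# pulled() matches exactly the changesets that came in. Caching the
# largefiles of every pulled revision rather than just its heads would
# be (a sketch, not executed here):
#   hg pull --lfrev 'pulled()' ../e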
  $ hg merge
  largefile sub/large4 has a merge conflict
  ancestor was 971fb41e78fea4f8e0ba5244784239371cb00591
  you can keep (l)ocal d846f26643bfa8ec210be40cc93cc6b7ff1128ea or take (o)ther e166e74c7303192238d60af5a9c4ce9bef0b7928.
  what do you want to do? l
  getting changed largefiles
  1 largefiles updated, 0 removed
  3 files updated, 1 files merged, 0 files removed, 0 files unresolved
  (branch merge, don't forget to commit)
  $ hg commit -m "Merge repos e and f"
  Invoking status precommit hook
  M normal3
  M sub/normal4
  M sub2/large6
  $ cat normal3
  normal3-modified
  $ cat sub/normal4
  normal4-modified
  $ cat sub/large4
  large4-merge-test
  $ cat sub2/large6
  large6-modified
  $ cat sub2/large7
  large7

Test status after merging with a branch that introduces a new largefile:

  $ echo large > large
  $ hg add --large large
  $ hg commit -m 'add largefile'
  Invoking status precommit hook
  A large
  $ hg update -q ".^"
  $ echo change >> normal3
  $ hg commit -m 'some change'
  Invoking status precommit hook
  M normal3
  created new head
  $ hg merge
  getting changed largefiles
  1 largefiles updated, 0 removed
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  (branch merge, don't forget to commit)
  $ hg status
  M large

- make sure update of merge with removed largefiles fails as expected
  $ hg rm sub2/large6
  $ hg up -r.
  abort: outstanding uncommitted merge
  [20]

- revert should be able to revert files introduced in a pending merge
  $ hg revert --all -r .
  removing .hglf/large
  undeleting .hglf/sub2/large6

Test that a normal file and a largefile with the same name and path cannot
coexist.

  $ rm sub2/large7
  $ echo "largeasnormal" > sub2/large7
  $ hg add sub2/large7
  sub2/large7 already a largefile
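# For reference: actually switching a file between largefile and normal
# tracking requires forgetting it and re-adding it, which the "swap"
# test further below commits explicitly; a sketch, not executed here:
#   hg forget sub2/large7
#   hg add sub2/large7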

Test that transplanting a largefile change works correctly.

  $ cd ..
  $ hg clone -r 8 d g
  adding changesets
  adding manifests
  adding file changes
  added 9 changesets with 26 changes to 10 files
  new changesets 30d30fe6a5be:a381d2c8c80e (9 drafts)
  updating to branch default
  getting changed largefiles
  3 largefiles updated, 0 removed
  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd g
  $ hg transplant -s ../d 598410d3eb9a
  searching for changes
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 2 changes to 2 files
  new changesets 598410d3eb9a (1 drafts)
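# For reference: transplant is itself a bundled extension; an hgrc
# enabling the two extensions exercised in this file would look like
# the sketch below (this test presumably configures them earlier,
# outside this excerpt):
#   [extensions]
#   largefiles =
#   transplant =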
  $ hg log --template '{rev}:{node|short} {desc|firstline}\n'
  9:598410d3eb9a modify normal file largefile in repo d
  8:a381d2c8c80e modify normal file and largefile in repo b
  7:daea875e9014 add/edit more largefiles
  6:4355d653f84f edit files yet again
  5:9d5af5072dbd edit files again
  4:74c02385b94c move files
  3:9e8fbc4bce62 copy files
  2:51a0ae4d5864 remove files
  1:ce8896473775 edit files
  0:30d30fe6a5be add files
  $ cat normal3
  normal3-modified
  $ cat sub/normal4
  normal4-modified
  $ cat sub/large4
  large4-modified
  $ cat sub2/large6
  large6-modified
  $ cat sub2/large7
  large7

Cat a largefile
  $ hg cat normal3
  normal3-modified
  $ hg cat sub/large4
  large4-modified
  $ rm "${USERCACHE}"/*
  $ hg cat -r a381d2c8c80e -o cat.out sub/large4
  $ cat cat.out
  large4-modified
  $ rm cat.out
  $ hg cat -r a381d2c8c80e normal3
  normal3-modified
  $ hg cat -r '.^' normal3
  normal3-modified
  $ hg cat -r '.^' sub/large4 doesntexist
  large4-modified
  doesntexist: no such file in rev a381d2c8c80e
  $ hg --cwd sub cat -r '.^' large4
  large4-modified
  $ hg --cwd sub cat -r '.^' ../normal3
  normal3-modified
Cat a standin
  $ hg cat .hglf/sub/large4
  e166e74c7303192238d60af5a9c4ce9bef0b7928
  $ hg cat .hglf/normal3
  .hglf/normal3: no such file in rev 598410d3eb9a
  [1]
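# For reference: a standin under .hglf/ stores the SHA-1 of the
# largefile's contents, so working copy and standin can be
# cross-checked; a sketch, not executed here:
#   test "$(sha1sum sub/large4 | cut -d' ' -f1)" = "$(hg cat .hglf/sub/large4)"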

Test that renaming a largefile results in correct output for status

  $ hg rename sub/large4 large4-renamed
  $ hg commit -m "test rename output"
  Invoking status precommit hook
  A large4-renamed
  R sub/large4
  $ cat large4-renamed
  large4-modified
  $ cd sub2
  $ hg rename large6 large6-renamed
  $ hg st
  A sub2/large6-renamed
  R sub2/large6
  $ cd ..

Test --normal flag

  $ dd if=/dev/zero bs=2k count=11k > new-largefile 2> /dev/null
  $ hg add --normal --large new-largefile
  abort: --normal cannot be used with --large
  [255]
  $ hg add --normal new-largefile
  new-largefile: up to 69 MB of RAM may be required to manage this file
  (use 'hg revert new-largefile' to cancel the pending addition)
  $ hg revert new-largefile
  $ hg --config ui.large-file-limit=22M add --normal new-largefile
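# For reference: ui.large-file-limit sets the size above which "hg add"
# warns about adding a file as a normal (non-large) file; the one-shot
# override above can be made permanent in an hgrc (a sketch, not
# executed here):
#   [ui]
#   large-file-limit = 22M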

Test explicit commit of switch between normal and largefile - make sure both
the add and the remove are committed.

  $ hg up -qC
  $ hg forget normal3 large4-renamed
  $ hg add --large normal3
  $ hg add large4-renamed
  $ hg commit -m 'swap' normal3 large4-renamed
  Invoking status precommit hook
  A large4-renamed
  A normal3
  ? new-largefile
  ? sub2/large6-renamed
  $ hg mani
  .hglf/normal3
  .hglf/sub2/large6
  .hglf/sub2/large7
  large4-renamed
  sub/normal4

  $ cd ..