##// END OF EJS Templates
merge with stable
Martin von Zweigbergk -
r45280:16cba0ad merge default
parent child Browse files
Show More
@@ -0,0 +1,145 b''
1 # pyoxidizer.py - Packaging support for PyOxidizer
2 #
3 # Copyright 2020 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 import os
11 import pathlib
12 import shutil
13 import subprocess
14 import sys
15
16 from .downloads import download_entry
17 from .util import (
18 extract_zip_to_directory,
19 process_install_rules,
20 find_vc_runtime_dll,
21 )
22
23
24 STAGING_RULES_WINDOWS = [
25 ('contrib/bash_completion', 'contrib/'),
26 ('contrib/hgk', 'contrib/hgk.tcl'),
27 ('contrib/hgweb.fcgi', 'contrib/'),
28 ('contrib/hgweb.wsgi', 'contrib/'),
29 ('contrib/logo-droplets.svg', 'contrib/'),
30 ('contrib/mercurial.el', 'contrib/'),
31 ('contrib/mq.el', 'contrib/'),
32 ('contrib/tcsh_completion', 'contrib/'),
33 ('contrib/tcsh_completion_build.sh', 'contrib/'),
34 ('contrib/vim/*', 'contrib/vim/'),
35 ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
36 ('contrib/win32/ReadMe.html', 'ReadMe.html'),
37 ('contrib/xml.rnc', 'contrib/'),
38 ('contrib/zsh_completion', 'contrib/'),
39 ('doc/*.html', 'doc/'),
40 ('doc/style.css', 'doc/'),
41 ('COPYING', 'Copying.txt'),
42 ]
43
44 STAGING_RULES_APP = [
45 ('mercurial/helptext/**/*.txt', 'helptext/'),
46 ('mercurial/defaultrc/*.rc', 'defaultrc/'),
47 ('mercurial/locale/**/*', 'locale/'),
48 ('mercurial/templates/**/*', 'templates/'),
49 ]
50
51 STAGING_EXCLUDES_WINDOWS = [
52 "doc/hg-ssh.8.html",
53 ]
54
55
56 def run_pyoxidizer(
57 source_dir: pathlib.Path,
58 build_dir: pathlib.Path,
59 out_dir: pathlib.Path,
60 target_triple: str,
61 ):
62 """Build Mercurial with PyOxidizer and copy additional files into place.
63
64 After successful completion, ``out_dir`` contains files constituting a
65 Mercurial install.
66 """
67 # We need to make gettext binaries available for compiling i18n files.
68 gettext_pkg, gettext_entry = download_entry('gettext', build_dir)
69 gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0]
70
71 gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version'])
72
73 if not gettext_root.exists():
74 extract_zip_to_directory(gettext_pkg, gettext_root)
75 extract_zip_to_directory(gettext_dep_pkg, gettext_root)
76
77 env = dict(os.environ)
78 env["PATH"] = "%s%s%s" % (
79 env["PATH"],
80 os.pathsep,
81 str(gettext_root / "bin"),
82 )
83
84 args = [
85 "pyoxidizer",
86 "build",
87 "--path",
88 str(source_dir / "rust" / "hgcli"),
89 "--release",
90 "--target-triple",
91 target_triple,
92 ]
93
94 subprocess.run(args, env=env, check=True)
95
96 if "windows" in target_triple:
97 target = "app_windows"
98 else:
99 target = "app_posix"
100
101 build_dir = (
102 source_dir / "build" / "pyoxidizer" / target_triple / "release" / target
103 )
104
105 if out_dir.exists():
106 print("purging %s" % out_dir)
107 shutil.rmtree(out_dir)
108
109 # Now assemble all the files from PyOxidizer into the staging directory.
110 shutil.copytree(build_dir, out_dir)
111
112 # Move some of those files around.
113 process_install_rules(STAGING_RULES_APP, build_dir, out_dir)
114 # Nuke the mercurial/* directory, as we copied resources
115 # to an appropriate location just above.
116 shutil.rmtree(out_dir / "mercurial")
117
118 # We also need to run setup.py build_doc to produce html files,
119 # as they aren't built as part of ``pip install``.
120 # This will fail if docutils isn't installed.
121 subprocess.run(
122 [sys.executable, str(source_dir / "setup.py"), "build_doc", "--html"],
123 cwd=str(source_dir),
124 check=True,
125 )
126
127 if "windows" in target_triple:
128 process_install_rules(STAGING_RULES_WINDOWS, source_dir, out_dir)
129
130 # Write out a default editor.rc file to configure notepad as the
131 # default editor.
132 with (out_dir / "defaultrc" / "editor.rc").open(
133 "w", encoding="utf-8"
134 ) as fh:
135 fh.write("[ui]\neditor = notepad\n")
136
137 for f in STAGING_EXCLUDES_WINDOWS:
138 p = out_dir / f
139 if p.exists():
140 print("removing %s" % p)
141 p.unlink()
142
143 # Add vcruntimeXXX.dll next to executable.
144 vc_runtime_dll = find_vc_runtime_dll(x64="x86_64" in target_triple)
145 shutil.copy(vc_runtime_dll, out_dir / vc_runtime_dll.name)
@@ -1,487 +1,548 b''
1 # cli.py - Command line interface for automation
1 # cli.py - Command line interface for automation
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import argparse
10 import argparse
11 import concurrent.futures as futures
11 import concurrent.futures as futures
12 import os
12 import os
13 import pathlib
13 import pathlib
14 import time
14 import time
15
15
16 from . import (
16 from . import (
17 aws,
17 aws,
18 HGAutomation,
18 HGAutomation,
19 linux,
19 linux,
20 try_server,
20 try_server,
21 windows,
21 windows,
22 )
22 )
23
23
24
24
25 SOURCE_ROOT = pathlib.Path(
25 SOURCE_ROOT = pathlib.Path(
26 os.path.abspath(__file__)
26 os.path.abspath(__file__)
27 ).parent.parent.parent.parent
27 ).parent.parent.parent.parent
28 DIST_PATH = SOURCE_ROOT / 'dist'
28 DIST_PATH = SOURCE_ROOT / 'dist'
29
29
30
30
31 def bootstrap_linux_dev(
31 def bootstrap_linux_dev(
32 hga: HGAutomation, aws_region, distros=None, parallel=False
32 hga: HGAutomation, aws_region, distros=None, parallel=False
33 ):
33 ):
34 c = hga.aws_connection(aws_region)
34 c = hga.aws_connection(aws_region)
35
35
36 if distros:
36 if distros:
37 distros = distros.split(',')
37 distros = distros.split(',')
38 else:
38 else:
39 distros = sorted(linux.DISTROS)
39 distros = sorted(linux.DISTROS)
40
40
41 # TODO There is a wonky interaction involving KeyboardInterrupt whereby
41 # TODO There is a wonky interaction involving KeyboardInterrupt whereby
42 # the context manager that is supposed to terminate the temporary EC2
42 # the context manager that is supposed to terminate the temporary EC2
43 # instance doesn't run. Until we fix this, make parallel building opt-in
43 # instance doesn't run. Until we fix this, make parallel building opt-in
44 # so we don't orphan instances.
44 # so we don't orphan instances.
45 if parallel:
45 if parallel:
46 fs = []
46 fs = []
47
47
48 with futures.ThreadPoolExecutor(len(distros)) as e:
48 with futures.ThreadPoolExecutor(len(distros)) as e:
49 for distro in distros:
49 for distro in distros:
50 fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
50 fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
51
51
52 for f in fs:
52 for f in fs:
53 f.result()
53 f.result()
54 else:
54 else:
55 for distro in distros:
55 for distro in distros:
56 aws.ensure_linux_dev_ami(c, distro=distro)
56 aws.ensure_linux_dev_ami(c, distro=distro)
57
57
58
58
59 def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
59 def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
60 c = hga.aws_connection(aws_region)
60 c = hga.aws_connection(aws_region)
61 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
61 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
62 print('Windows development AMI available as %s' % image.id)
62 print('Windows development AMI available as %s' % image.id)
63
63
64
64
65 def build_inno(
65 def build_inno(
66 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
66 hga: HGAutomation,
67 aws_region,
68 python_version,
69 arch,
70 revision,
71 version,
72 base_image_name,
67 ):
73 ):
68 c = hga.aws_connection(aws_region)
74 c = hga.aws_connection(aws_region)
69 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
75 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
70 DIST_PATH.mkdir(exist_ok=True)
76 DIST_PATH.mkdir(exist_ok=True)
71
77
72 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
78 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
73 instance = insts[0]
79 instance = insts[0]
74
80
75 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
81 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
76
82
77 for a in arch:
83 for py_version in python_version:
78 windows.build_inno_installer(
84 for a in arch:
79 instance.winrm_client, a, DIST_PATH, version=version
85 windows.build_inno_installer(
80 )
86 instance.winrm_client,
87 py_version,
88 a,
89 DIST_PATH,
90 version=version,
91 )
81
92
82
93
83 def build_wix(
94 def build_wix(
84 hga: HGAutomation, aws_region, arch, revision, version, base_image_name
95 hga: HGAutomation,
96 aws_region,
97 python_version,
98 arch,
99 revision,
100 version,
101 base_image_name,
85 ):
102 ):
86 c = hga.aws_connection(aws_region)
103 c = hga.aws_connection(aws_region)
87 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
104 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
88 DIST_PATH.mkdir(exist_ok=True)
105 DIST_PATH.mkdir(exist_ok=True)
89
106
90 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
107 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
91 instance = insts[0]
108 instance = insts[0]
92
109
93 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
110 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
94
111
95 for a in arch:
112 for py_version in python_version:
96 windows.build_wix_installer(
113 for a in arch:
97 instance.winrm_client, a, DIST_PATH, version=version
114 windows.build_wix_installer(
98 )
115 instance.winrm_client,
116 py_version,
117 a,
118 DIST_PATH,
119 version=version,
120 )
99
121
100
122
101 def build_windows_wheel(
123 def build_windows_wheel(
102 hga: HGAutomation, aws_region, arch, revision, base_image_name
124 hga: HGAutomation,
125 aws_region,
126 python_version,
127 arch,
128 revision,
129 base_image_name,
103 ):
130 ):
104 c = hga.aws_connection(aws_region)
131 c = hga.aws_connection(aws_region)
105 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
132 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
106 DIST_PATH.mkdir(exist_ok=True)
133 DIST_PATH.mkdir(exist_ok=True)
107
134
108 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
135 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
109 instance = insts[0]
136 instance = insts[0]
110
137
111 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
138 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
112
139
113 for a in arch:
140 for py_version in python_version:
114 windows.build_wheel(instance.winrm_client, a, DIST_PATH)
141 for a in arch:
142 windows.build_wheel(
143 instance.winrm_client, py_version, a, DIST_PATH
144 )
115
145
116
146
117 def build_all_windows_packages(
147 def build_all_windows_packages(
118 hga: HGAutomation, aws_region, revision, version, base_image_name
148 hga: HGAutomation, aws_region, revision, version, base_image_name
119 ):
149 ):
120 c = hga.aws_connection(aws_region)
150 c = hga.aws_connection(aws_region)
121 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
151 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
122 DIST_PATH.mkdir(exist_ok=True)
152 DIST_PATH.mkdir(exist_ok=True)
123
153
124 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
154 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
125 instance = insts[0]
155 instance = insts[0]
126
156
127 winrm_client = instance.winrm_client
157 winrm_client = instance.winrm_client
128
158
129 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
159 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
130
160
131 for arch in ('x86', 'x64'):
161 for py_version in ("2.7", "3.7", "3.8"):
132 windows.purge_hg(winrm_client)
162 for arch in ("x86", "x64"):
133 windows.build_wheel(winrm_client, arch, DIST_PATH)
163 windows.purge_hg(winrm_client)
134 windows.purge_hg(winrm_client)
164 windows.build_wheel(
135 windows.build_inno_installer(
165 winrm_client,
136 winrm_client, arch, DIST_PATH, version=version
166 python_version=py_version,
137 )
167 arch=arch,
138 windows.purge_hg(winrm_client)
168 dest_path=DIST_PATH,
139 windows.build_wix_installer(
169 )
140 winrm_client, arch, DIST_PATH, version=version
170
141 )
171 for py_version in (2, 3):
172 for arch in ('x86', 'x64'):
173 windows.purge_hg(winrm_client)
174 windows.build_inno_installer(
175 winrm_client, py_version, arch, DIST_PATH, version=version
176 )
177 windows.build_wix_installer(
178 winrm_client, py_version, arch, DIST_PATH, version=version
179 )
142
180
143
181
144 def terminate_ec2_instances(hga: HGAutomation, aws_region):
182 def terminate_ec2_instances(hga: HGAutomation, aws_region):
145 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
183 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
146 aws.terminate_ec2_instances(c.ec2resource)
184 aws.terminate_ec2_instances(c.ec2resource)
147
185
148
186
149 def purge_ec2_resources(hga: HGAutomation, aws_region):
187 def purge_ec2_resources(hga: HGAutomation, aws_region):
150 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
188 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
151 aws.remove_resources(c)
189 aws.remove_resources(c)
152
190
153
191
154 def run_tests_linux(
192 def run_tests_linux(
155 hga: HGAutomation,
193 hga: HGAutomation,
156 aws_region,
194 aws_region,
157 instance_type,
195 instance_type,
158 python_version,
196 python_version,
159 test_flags,
197 test_flags,
160 distro,
198 distro,
161 filesystem,
199 filesystem,
162 ):
200 ):
163 c = hga.aws_connection(aws_region)
201 c = hga.aws_connection(aws_region)
164 image = aws.ensure_linux_dev_ami(c, distro=distro)
202 image = aws.ensure_linux_dev_ami(c, distro=distro)
165
203
166 t_start = time.time()
204 t_start = time.time()
167
205
168 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
206 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
169
207
170 with aws.temporary_linux_dev_instances(
208 with aws.temporary_linux_dev_instances(
171 c, image, instance_type, ensure_extra_volume=ensure_extra_volume
209 c, image, instance_type, ensure_extra_volume=ensure_extra_volume
172 ) as insts:
210 ) as insts:
173
211
174 instance = insts[0]
212 instance = insts[0]
175
213
176 linux.prepare_exec_environment(
214 linux.prepare_exec_environment(
177 instance.ssh_client, filesystem=filesystem
215 instance.ssh_client, filesystem=filesystem
178 )
216 )
179 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
217 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
180 t_prepared = time.time()
218 t_prepared = time.time()
181 linux.run_tests(instance.ssh_client, python_version, test_flags)
219 linux.run_tests(instance.ssh_client, python_version, test_flags)
182 t_done = time.time()
220 t_done = time.time()
183
221
184 t_setup = t_prepared - t_start
222 t_setup = t_prepared - t_start
185 t_all = t_done - t_start
223 t_all = t_done - t_start
186
224
187 print(
225 print(
188 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
226 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
189 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
227 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
190 )
228 )
191
229
192
230
193 def run_tests_windows(
231 def run_tests_windows(
194 hga: HGAutomation,
232 hga: HGAutomation,
195 aws_region,
233 aws_region,
196 instance_type,
234 instance_type,
197 python_version,
235 python_version,
198 arch,
236 arch,
199 test_flags,
237 test_flags,
200 base_image_name,
238 base_image_name,
201 ):
239 ):
202 c = hga.aws_connection(aws_region)
240 c = hga.aws_connection(aws_region)
203 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
241 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
204
242
205 with aws.temporary_windows_dev_instances(
243 with aws.temporary_windows_dev_instances(
206 c, image, instance_type, disable_antivirus=True
244 c, image, instance_type, disable_antivirus=True
207 ) as insts:
245 ) as insts:
208 instance = insts[0]
246 instance = insts[0]
209
247
210 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
248 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
211 windows.run_tests(
249 windows.run_tests(
212 instance.winrm_client, python_version, arch, test_flags
250 instance.winrm_client, python_version, arch, test_flags
213 )
251 )
214
252
215
253
216 def publish_windows_artifacts(
254 def publish_windows_artifacts(
217 hg: HGAutomation,
255 hg: HGAutomation,
218 aws_region,
256 aws_region,
219 version: str,
257 version: str,
220 pypi: bool,
258 pypi: bool,
221 mercurial_scm_org: bool,
259 mercurial_scm_org: bool,
222 ssh_username: str,
260 ssh_username: str,
223 ):
261 ):
224 windows.publish_artifacts(
262 windows.publish_artifacts(
225 DIST_PATH,
263 DIST_PATH,
226 version,
264 version,
227 pypi=pypi,
265 pypi=pypi,
228 mercurial_scm_org=mercurial_scm_org,
266 mercurial_scm_org=mercurial_scm_org,
229 ssh_username=ssh_username,
267 ssh_username=ssh_username,
230 )
268 )
231
269
232
270
233 def run_try(hga: HGAutomation, aws_region: str, rev: str):
271 def run_try(hga: HGAutomation, aws_region: str, rev: str):
234 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
272 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
235 try_server.trigger_try(c, rev=rev)
273 try_server.trigger_try(c, rev=rev)
236
274
237
275
238 def get_parser():
276 def get_parser():
239 parser = argparse.ArgumentParser()
277 parser = argparse.ArgumentParser()
240
278
241 parser.add_argument(
279 parser.add_argument(
242 '--state-path',
280 '--state-path',
243 default='~/.hgautomation',
281 default='~/.hgautomation',
244 help='Path for local state files',
282 help='Path for local state files',
245 )
283 )
246 parser.add_argument(
284 parser.add_argument(
247 '--aws-region', help='AWS region to use', default='us-west-2',
285 '--aws-region', help='AWS region to use', default='us-west-2',
248 )
286 )
249
287
250 subparsers = parser.add_subparsers()
288 subparsers = parser.add_subparsers()
251
289
252 sp = subparsers.add_parser(
290 sp = subparsers.add_parser(
253 'bootstrap-linux-dev', help='Bootstrap Linux development environments',
291 'bootstrap-linux-dev', help='Bootstrap Linux development environments',
254 )
292 )
255 sp.add_argument(
293 sp.add_argument(
256 '--distros', help='Comma delimited list of distros to bootstrap',
294 '--distros', help='Comma delimited list of distros to bootstrap',
257 )
295 )
258 sp.add_argument(
296 sp.add_argument(
259 '--parallel',
297 '--parallel',
260 action='store_true',
298 action='store_true',
261 help='Generate AMIs in parallel (not CTRL-c safe)',
299 help='Generate AMIs in parallel (not CTRL-c safe)',
262 )
300 )
263 sp.set_defaults(func=bootstrap_linux_dev)
301 sp.set_defaults(func=bootstrap_linux_dev)
264
302
265 sp = subparsers.add_parser(
303 sp = subparsers.add_parser(
266 'bootstrap-windows-dev',
304 'bootstrap-windows-dev',
267 help='Bootstrap the Windows development environment',
305 help='Bootstrap the Windows development environment',
268 )
306 )
269 sp.add_argument(
307 sp.add_argument(
270 '--base-image-name',
308 '--base-image-name',
271 help='AMI name of base image',
309 help='AMI name of base image',
272 default=aws.WINDOWS_BASE_IMAGE_NAME,
310 default=aws.WINDOWS_BASE_IMAGE_NAME,
273 )
311 )
274 sp.set_defaults(func=bootstrap_windows_dev)
312 sp.set_defaults(func=bootstrap_windows_dev)
275
313
276 sp = subparsers.add_parser(
314 sp = subparsers.add_parser(
277 'build-all-windows-packages', help='Build all Windows packages',
315 'build-all-windows-packages', help='Build all Windows packages',
278 )
316 )
279 sp.add_argument(
317 sp.add_argument(
280 '--revision', help='Mercurial revision to build', default='.',
318 '--revision', help='Mercurial revision to build', default='.',
281 )
319 )
282 sp.add_argument(
320 sp.add_argument(
283 '--version', help='Mercurial version string to use',
321 '--version', help='Mercurial version string to use',
284 )
322 )
285 sp.add_argument(
323 sp.add_argument(
286 '--base-image-name',
324 '--base-image-name',
287 help='AMI name of base image',
325 help='AMI name of base image',
288 default=aws.WINDOWS_BASE_IMAGE_NAME,
326 default=aws.WINDOWS_BASE_IMAGE_NAME,
289 )
327 )
290 sp.set_defaults(func=build_all_windows_packages)
328 sp.set_defaults(func=build_all_windows_packages)
291
329
292 sp = subparsers.add_parser(
330 sp = subparsers.add_parser(
293 'build-inno', help='Build Inno Setup installer(s)',
331 'build-inno', help='Build Inno Setup installer(s)',
294 )
332 )
295 sp.add_argument(
333 sp.add_argument(
334 '--python-version',
335 help='Which version of Python to target',
336 choices={2, 3},
337 type=int,
338 nargs='*',
339 default=[3],
340 )
341 sp.add_argument(
296 '--arch',
342 '--arch',
297 help='Architecture to build for',
343 help='Architecture to build for',
298 choices={'x86', 'x64'},
344 choices={'x86', 'x64'},
299 nargs='*',
345 nargs='*',
300 default=['x64'],
346 default=['x64'],
301 )
347 )
302 sp.add_argument(
348 sp.add_argument(
303 '--revision', help='Mercurial revision to build', default='.',
349 '--revision', help='Mercurial revision to build', default='.',
304 )
350 )
305 sp.add_argument(
351 sp.add_argument(
306 '--version', help='Mercurial version string to use in installer',
352 '--version', help='Mercurial version string to use in installer',
307 )
353 )
308 sp.add_argument(
354 sp.add_argument(
309 '--base-image-name',
355 '--base-image-name',
310 help='AMI name of base image',
356 help='AMI name of base image',
311 default=aws.WINDOWS_BASE_IMAGE_NAME,
357 default=aws.WINDOWS_BASE_IMAGE_NAME,
312 )
358 )
313 sp.set_defaults(func=build_inno)
359 sp.set_defaults(func=build_inno)
314
360
315 sp = subparsers.add_parser(
361 sp = subparsers.add_parser(
316 'build-windows-wheel', help='Build Windows wheel(s)',
362 'build-windows-wheel', help='Build Windows wheel(s)',
317 )
363 )
318 sp.add_argument(
364 sp.add_argument(
365 '--python-version',
366 help='Python version to build for',
367 choices={'2.7', '3.7', '3.8'},
368 nargs='*',
369 default=['3.8'],
370 )
371 sp.add_argument(
319 '--arch',
372 '--arch',
320 help='Architecture to build for',
373 help='Architecture to build for',
321 choices={'x86', 'x64'},
374 choices={'x86', 'x64'},
322 nargs='*',
375 nargs='*',
323 default=['x64'],
376 default=['x64'],
324 )
377 )
325 sp.add_argument(
378 sp.add_argument(
326 '--revision', help='Mercurial revision to build', default='.',
379 '--revision', help='Mercurial revision to build', default='.',
327 )
380 )
328 sp.add_argument(
381 sp.add_argument(
329 '--base-image-name',
382 '--base-image-name',
330 help='AMI name of base image',
383 help='AMI name of base image',
331 default=aws.WINDOWS_BASE_IMAGE_NAME,
384 default=aws.WINDOWS_BASE_IMAGE_NAME,
332 )
385 )
333 sp.set_defaults(func=build_windows_wheel)
386 sp.set_defaults(func=build_windows_wheel)
334
387
335 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
388 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
336 sp.add_argument(
389 sp.add_argument(
390 '--python-version',
391 help='Which version of Python to target',
392 choices={2, 3},
393 type=int,
394 nargs='*',
395 default=[3],
396 )
397 sp.add_argument(
337 '--arch',
398 '--arch',
338 help='Architecture to build for',
399 help='Architecture to build for',
339 choices={'x86', 'x64'},
400 choices={'x86', 'x64'},
340 nargs='*',
401 nargs='*',
341 default=['x64'],
402 default=['x64'],
342 )
403 )
343 sp.add_argument(
404 sp.add_argument(
344 '--revision', help='Mercurial revision to build', default='.',
405 '--revision', help='Mercurial revision to build', default='.',
345 )
406 )
346 sp.add_argument(
407 sp.add_argument(
347 '--version', help='Mercurial version string to use in installer',
408 '--version', help='Mercurial version string to use in installer',
348 )
409 )
349 sp.add_argument(
410 sp.add_argument(
350 '--base-image-name',
411 '--base-image-name',
351 help='AMI name of base image',
412 help='AMI name of base image',
352 default=aws.WINDOWS_BASE_IMAGE_NAME,
413 default=aws.WINDOWS_BASE_IMAGE_NAME,
353 )
414 )
354 sp.set_defaults(func=build_wix)
415 sp.set_defaults(func=build_wix)
355
416
356 sp = subparsers.add_parser(
417 sp = subparsers.add_parser(
357 'terminate-ec2-instances',
418 'terminate-ec2-instances',
358 help='Terminate all active EC2 instances managed by us',
419 help='Terminate all active EC2 instances managed by us',
359 )
420 )
360 sp.set_defaults(func=terminate_ec2_instances)
421 sp.set_defaults(func=terminate_ec2_instances)
361
422
362 sp = subparsers.add_parser(
423 sp = subparsers.add_parser(
363 'purge-ec2-resources', help='Purge all EC2 resources managed by us',
424 'purge-ec2-resources', help='Purge all EC2 resources managed by us',
364 )
425 )
365 sp.set_defaults(func=purge_ec2_resources)
426 sp.set_defaults(func=purge_ec2_resources)
366
427
367 sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
428 sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
368 sp.add_argument(
429 sp.add_argument(
369 '--distro',
430 '--distro',
370 help='Linux distribution to run tests on',
431 help='Linux distribution to run tests on',
371 choices=linux.DISTROS,
432 choices=linux.DISTROS,
372 default='debian10',
433 default='debian10',
373 )
434 )
374 sp.add_argument(
435 sp.add_argument(
375 '--filesystem',
436 '--filesystem',
376 help='Filesystem type to use',
437 help='Filesystem type to use',
377 choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
438 choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
378 default='default',
439 default='default',
379 )
440 )
380 sp.add_argument(
441 sp.add_argument(
381 '--instance-type',
442 '--instance-type',
382 help='EC2 instance type to use',
443 help='EC2 instance type to use',
383 default='c5.9xlarge',
444 default='c5.9xlarge',
384 )
445 )
385 sp.add_argument(
446 sp.add_argument(
386 '--python-version',
447 '--python-version',
387 help='Python version to use',
448 help='Python version to use',
388 choices={
449 choices={
389 'system2',
450 'system2',
390 'system3',
451 'system3',
391 '2.7',
452 '2.7',
392 '3.5',
453 '3.5',
393 '3.6',
454 '3.6',
394 '3.7',
455 '3.7',
395 '3.8',
456 '3.8',
396 'pypy',
457 'pypy',
397 'pypy3.5',
458 'pypy3.5',
398 'pypy3.6',
459 'pypy3.6',
399 },
460 },
400 default='system2',
461 default='system2',
401 )
462 )
402 sp.add_argument(
463 sp.add_argument(
403 'test_flags',
464 'test_flags',
404 help='Extra command line flags to pass to run-tests.py',
465 help='Extra command line flags to pass to run-tests.py',
405 nargs='*',
466 nargs='*',
406 )
467 )
407 sp.set_defaults(func=run_tests_linux)
468 sp.set_defaults(func=run_tests_linux)
408
469
409 sp = subparsers.add_parser(
470 sp = subparsers.add_parser(
410 'run-tests-windows', help='Run tests on Windows',
471 'run-tests-windows', help='Run tests on Windows',
411 )
472 )
412 sp.add_argument(
473 sp.add_argument(
413 '--instance-type', help='EC2 instance type to use', default='t3.medium',
474 '--instance-type', help='EC2 instance type to use', default='t3.medium',
414 )
475 )
415 sp.add_argument(
476 sp.add_argument(
416 '--python-version',
477 '--python-version',
417 help='Python version to use',
478 help='Python version to use',
418 choices={'2.7', '3.5', '3.6', '3.7', '3.8'},
479 choices={'2.7', '3.5', '3.6', '3.7', '3.8'},
419 default='2.7',
480 default='2.7',
420 )
481 )
421 sp.add_argument(
482 sp.add_argument(
422 '--arch',
483 '--arch',
423 help='Architecture to test',
484 help='Architecture to test',
424 choices={'x86', 'x64'},
485 choices={'x86', 'x64'},
425 default='x64',
486 default='x64',
426 )
487 )
427 sp.add_argument(
488 sp.add_argument(
428 '--test-flags', help='Extra command line flags to pass to run-tests.py',
489 '--test-flags', help='Extra command line flags to pass to run-tests.py',
429 )
490 )
430 sp.add_argument(
491 sp.add_argument(
431 '--base-image-name',
492 '--base-image-name',
432 help='AMI name of base image',
493 help='AMI name of base image',
433 default=aws.WINDOWS_BASE_IMAGE_NAME,
494 default=aws.WINDOWS_BASE_IMAGE_NAME,
434 )
495 )
435 sp.set_defaults(func=run_tests_windows)
496 sp.set_defaults(func=run_tests_windows)
436
497
437 sp = subparsers.add_parser(
498 sp = subparsers.add_parser(
438 'publish-windows-artifacts',
499 'publish-windows-artifacts',
439 help='Publish built Windows artifacts (wheels, installers, etc)',
500 help='Publish built Windows artifacts (wheels, installers, etc)',
440 )
501 )
441 sp.add_argument(
502 sp.add_argument(
442 '--no-pypi',
503 '--no-pypi',
443 dest='pypi',
504 dest='pypi',
444 action='store_false',
505 action='store_false',
445 default=True,
506 default=True,
446 help='Skip uploading to PyPI',
507 help='Skip uploading to PyPI',
447 )
508 )
448 sp.add_argument(
509 sp.add_argument(
449 '--no-mercurial-scm-org',
510 '--no-mercurial-scm-org',
450 dest='mercurial_scm_org',
511 dest='mercurial_scm_org',
451 action='store_false',
512 action='store_false',
452 default=True,
513 default=True,
453 help='Skip uploading to www.mercurial-scm.org',
514 help='Skip uploading to www.mercurial-scm.org',
454 )
515 )
455 sp.add_argument(
516 sp.add_argument(
456 '--ssh-username', help='SSH username for mercurial-scm.org',
517 '--ssh-username', help='SSH username for mercurial-scm.org',
457 )
518 )
458 sp.add_argument(
519 sp.add_argument(
459 'version', help='Mercurial version string to locate local packages',
520 'version', help='Mercurial version string to locate local packages',
460 )
521 )
461 sp.set_defaults(func=publish_windows_artifacts)
522 sp.set_defaults(func=publish_windows_artifacts)
462
523
463 sp = subparsers.add_parser(
524 sp = subparsers.add_parser(
464 'try', help='Run CI automation against a custom changeset'
525 'try', help='Run CI automation against a custom changeset'
465 )
526 )
466 sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
527 sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
467 sp.set_defaults(func=run_try)
528 sp.set_defaults(func=run_try)
468
529
469 return parser
530 return parser
470
531
471
532
472 def main():
533 def main():
473 parser = get_parser()
534 parser = get_parser()
474 args = parser.parse_args()
535 args = parser.parse_args()
475
536
476 local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
537 local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
477 automation = HGAutomation(local_state_path)
538 automation = HGAutomation(local_state_path)
478
539
479 if not hasattr(args, 'func'):
540 if not hasattr(args, 'func'):
480 parser.print_help()
541 parser.print_help()
481 return
542 return
482
543
483 kwargs = dict(vars(args))
544 kwargs = dict(vars(args))
484 del kwargs['func']
545 del kwargs['func']
485 del kwargs['state_path']
546 del kwargs['state_path']
486
547
487 args.func(automation, **kwargs)
548 args.func(automation, **kwargs)
@@ -1,595 +1,597 b''
1 # linux.py - Linux specific automation functionality
1 # linux.py - Linux specific automation functionality
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import os
10 import os
11 import pathlib
11 import pathlib
12 import shlex
12 import shlex
13 import subprocess
13 import subprocess
14 import tempfile
14 import tempfile
15
15
16 from .ssh import exec_command
16 from .ssh import exec_command
17
17
18
18
19 # Linux distributions that are supported.
19 # Linux distributions that are supported.
20 DISTROS = {
20 DISTROS = {
21 'debian9',
21 'debian9',
22 'debian10',
22 'debian10',
23 'ubuntu18.04',
23 'ubuntu18.04',
24 'ubuntu19.04',
24 'ubuntu19.04',
25 }
25 }
26
26
27 INSTALL_PYTHONS = r'''
27 INSTALL_PYTHONS = r'''
28 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
28 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
29 PYENV3_VERSIONS="3.5.9 3.6.10 3.7.7 3.8.2 pypy3.5-7.0.0 pypy3.6-7.3.0"
29 PYENV3_VERSIONS="3.5.9 3.6.10 3.7.7 3.8.2 pypy3.5-7.0.0 pypy3.6-7.3.0"
30
30
31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
31 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
32 pushd /hgdev/pyenv
32 pushd /hgdev/pyenv
33 git checkout 3005c4664372ae13fbe376be699313eb428c8bdd
33 git checkout 3005c4664372ae13fbe376be699313eb428c8bdd
34 popd
34 popd
35
35
36 export PYENV_ROOT="/hgdev/pyenv"
36 export PYENV_ROOT="/hgdev/pyenv"
37 export PATH="$PYENV_ROOT/bin:$PATH"
37 export PATH="$PYENV_ROOT/bin:$PATH"
38
38
39 # pip 19.2.3.
39 # pip 19.2.3.
40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
40 PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe
41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
41 wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py
42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
42 echo "${PIP_SHA256} get-pip.py" | sha256sum --check -
43
43
44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
44 VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2
45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
45 VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz
46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
46 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
47 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
48
48
49 for v in ${PYENV2_VERSIONS}; do
49 for v in ${PYENV2_VERSIONS}; do
50 pyenv install -v ${v}
50 pyenv install -v ${v}
51 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
51 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
52 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
52 ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
53 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
53 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
54 done
54 done
55
55
56 for v in ${PYENV3_VERSIONS}; do
56 for v in ${PYENV3_VERSIONS}; do
57 pyenv install -v ${v}
57 pyenv install -v ${v}
58 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
58 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
59 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
59 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
60 done
60 done
61
61
62 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
62 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
63 '''.lstrip().replace(
63 '''.lstrip().replace(
64 '\r\n', '\n'
64 '\r\n', '\n'
65 )
65 )
66
66
67
67
68 INSTALL_RUST = r'''
68 INSTALL_RUST = r'''
69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
69 RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076
70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
70 wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init
71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
71 echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check -
72
72
73 chmod +x rustup-init
73 chmod +x rustup-init
74 sudo -H -u hg -g hg ./rustup-init -y
74 sudo -H -u hg -g hg ./rustup-init -y
75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.34.2
75 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.42.0
76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
76 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy
77
78 sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.7.0 pyoxidizer
77 '''
79 '''
78
80
79
81
80 BOOTSTRAP_VIRTUALENV = r'''
82 BOOTSTRAP_VIRTUALENV = r'''
81 /usr/bin/virtualenv /hgdev/venv-bootstrap
83 /usr/bin/virtualenv /hgdev/venv-bootstrap
82
84
83 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
85 HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f
84 HG_TARBALL=mercurial-5.1.1.tar.gz
86 HG_TARBALL=mercurial-5.1.1.tar.gz
85
87
86 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
88 wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
87 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
89 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
88
90
89 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
91 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
90 '''.lstrip().replace(
92 '''.lstrip().replace(
91 '\r\n', '\n'
93 '\r\n', '\n'
92 )
94 )
93
95
94
96
95 BOOTSTRAP_DEBIAN = (
97 BOOTSTRAP_DEBIAN = (
96 r'''
98 r'''
97 #!/bin/bash
99 #!/bin/bash
98
100
99 set -ex
101 set -ex
100
102
101 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
103 DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'`
102 DEBIAN_VERSION=`cat /etc/debian_version`
104 DEBIAN_VERSION=`cat /etc/debian_version`
103 LSB_RELEASE=`lsb_release -cs`
105 LSB_RELEASE=`lsb_release -cs`
104
106
105 sudo /usr/sbin/groupadd hg
107 sudo /usr/sbin/groupadd hg
106 sudo /usr/sbin/groupadd docker
108 sudo /usr/sbin/groupadd docker
107 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
109 sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
108 sudo mkdir /home/hg/.ssh
110 sudo mkdir /home/hg/.ssh
109 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
111 sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
110 sudo chown -R hg:hg /home/hg/.ssh
112 sudo chown -R hg:hg /home/hg/.ssh
111 sudo chmod 700 /home/hg/.ssh
113 sudo chmod 700 /home/hg/.ssh
112 sudo chmod 600 /home/hg/.ssh/authorized_keys
114 sudo chmod 600 /home/hg/.ssh/authorized_keys
113
115
114 cat << EOF | sudo tee /etc/sudoers.d/90-hg
116 cat << EOF | sudo tee /etc/sudoers.d/90-hg
115 hg ALL=(ALL) NOPASSWD:ALL
117 hg ALL=(ALL) NOPASSWD:ALL
116 EOF
118 EOF
117
119
118 sudo apt-get update
120 sudo apt-get update
119 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
121 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade
120
122
121 # Install packages necessary to set up Docker Apt repo.
123 # Install packages necessary to set up Docker Apt repo.
122 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
124 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
123 apt-transport-https \
125 apt-transport-https \
124 gnupg
126 gnupg
125
127
126 cat > docker-apt-key << EOF
128 cat > docker-apt-key << EOF
127 -----BEGIN PGP PUBLIC KEY BLOCK-----
129 -----BEGIN PGP PUBLIC KEY BLOCK-----
128
130
129 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
131 mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
130 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
132 lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
131 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
133 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
132 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
134 L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
133 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
135 UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
134 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
136 cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
135 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
137 ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
136 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
138 vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
137 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
139 G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
138 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
140 XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
139 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
141 q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
140 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
142 tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
141 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
143 BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
142 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
144 v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
143 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
145 tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
144 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
146 jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
145 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
147 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
146 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
148 XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
147 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
149 FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
148 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
150 g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
149 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
151 ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
150 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
152 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
151 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
153 G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
152 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
154 FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
153 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
155 EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
154 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
156 M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
155 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
157 Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
156 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
158 w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
157 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
159 z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
158 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
160 eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
159 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
161 VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
160 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
162 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
161 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
163 zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
162 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
164 pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
163 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
165 ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
164 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
166 BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
165 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
167 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
166 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
168 YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
167 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
169 mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
168 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
170 KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
169 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
171 JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
170 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
172 cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
171 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
173 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
172 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
174 U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
173 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
175 VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
174 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
176 irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
175 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
177 SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
176 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
178 QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
177 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
179 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
178 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
180 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
179 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
181 dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
180 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
182 Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
181 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
183 H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
182 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
184 /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
183 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
185 M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
184 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
186 xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
185 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
187 jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
186 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
188 YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
187 =0YYh
189 =0YYh
188 -----END PGP PUBLIC KEY BLOCK-----
190 -----END PGP PUBLIC KEY BLOCK-----
189 EOF
191 EOF
190
192
191 sudo apt-key add docker-apt-key
193 sudo apt-key add docker-apt-key
192
194
193 if [ "$LSB_RELEASE" = "stretch" ]; then
195 if [ "$LSB_RELEASE" = "stretch" ]; then
194 cat << EOF | sudo tee -a /etc/apt/sources.list
196 cat << EOF | sudo tee -a /etc/apt/sources.list
195 # Need backports for clang-format-6.0
197 # Need backports for clang-format-6.0
196 deb http://deb.debian.org/debian stretch-backports main
198 deb http://deb.debian.org/debian stretch-backports main
197 EOF
199 EOF
198 fi
200 fi
199
201
200 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
202 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then
201 cat << EOF | sudo tee -a /etc/apt/sources.list
203 cat << EOF | sudo tee -a /etc/apt/sources.list
202 # Sources are useful if we want to compile things locally.
204 # Sources are useful if we want to compile things locally.
203 deb-src http://deb.debian.org/debian $LSB_RELEASE main
205 deb-src http://deb.debian.org/debian $LSB_RELEASE main
204 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
206 deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main
205 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
207 deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main
206 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
208 deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main
207
209
208 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
210 deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable
209 EOF
211 EOF
210
212
211 elif [ "$DISTRO" = "Ubuntu" ]; then
213 elif [ "$DISTRO" = "Ubuntu" ]; then
212 cat << EOF | sudo tee -a /etc/apt/sources.list
214 cat << EOF | sudo tee -a /etc/apt/sources.list
213 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
215 deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
214 EOF
216 EOF
215
217
216 fi
218 fi
217
219
218 sudo apt-get update
220 sudo apt-get update
219
221
220 PACKAGES="\
222 PACKAGES="\
221 awscli \
223 awscli \
222 btrfs-progs \
224 btrfs-progs \
223 build-essential \
225 build-essential \
224 bzr \
226 bzr \
225 clang-format-6.0 \
227 clang-format-6.0 \
226 cvs \
228 cvs \
227 darcs \
229 darcs \
228 debhelper \
230 debhelper \
229 devscripts \
231 devscripts \
230 docker-ce \
232 docker-ce \
231 dpkg-dev \
233 dpkg-dev \
232 dstat \
234 dstat \
233 emacs \
235 emacs \
234 gettext \
236 gettext \
235 git \
237 git \
236 htop \
238 htop \
237 iotop \
239 iotop \
238 jfsutils \
240 jfsutils \
239 libbz2-dev \
241 libbz2-dev \
240 libexpat1-dev \
242 libexpat1-dev \
241 libffi-dev \
243 libffi-dev \
242 libgdbm-dev \
244 libgdbm-dev \
243 liblzma-dev \
245 liblzma-dev \
244 libncurses5-dev \
246 libncurses5-dev \
245 libnss3-dev \
247 libnss3-dev \
246 libreadline-dev \
248 libreadline-dev \
247 libsqlite3-dev \
249 libsqlite3-dev \
248 libssl-dev \
250 libssl-dev \
249 netbase \
251 netbase \
250 ntfs-3g \
252 ntfs-3g \
251 nvme-cli \
253 nvme-cli \
252 pyflakes \
254 pyflakes \
253 pyflakes3 \
255 pyflakes3 \
254 pylint \
256 pylint \
255 pylint3 \
257 pylint3 \
256 python-all-dev \
258 python-all-dev \
257 python-dev \
259 python-dev \
258 python-docutils \
260 python-docutils \
259 python-fuzzywuzzy \
261 python-fuzzywuzzy \
260 python-pygments \
262 python-pygments \
261 python-subversion \
263 python-subversion \
262 python-vcr \
264 python-vcr \
263 python3-boto3 \
265 python3-boto3 \
264 python3-dev \
266 python3-dev \
265 python3-docutils \
267 python3-docutils \
266 python3-fuzzywuzzy \
268 python3-fuzzywuzzy \
267 python3-pygments \
269 python3-pygments \
268 python3-vcr \
270 python3-vcr \
269 python3-venv \
271 python3-venv \
270 rsync \
272 rsync \
271 sqlite3 \
273 sqlite3 \
272 subversion \
274 subversion \
273 tcl-dev \
275 tcl-dev \
274 tk-dev \
276 tk-dev \
275 tla \
277 tla \
276 unzip \
278 unzip \
277 uuid-dev \
279 uuid-dev \
278 vim \
280 vim \
279 virtualenv \
281 virtualenv \
280 wget \
282 wget \
281 xfsprogs \
283 xfsprogs \
282 zip \
284 zip \
283 zlib1g-dev"
285 zlib1g-dev"
284
286
285 if [ "LSB_RELEASE" = "stretch" ]; then
287 if [ "LSB_RELEASE" = "stretch" ]; then
286 PACKAGES="$PACKAGES linux-perf"
288 PACKAGES="$PACKAGES linux-perf"
287 elif [ "$DISTRO" = "Ubuntu" ]; then
289 elif [ "$DISTRO" = "Ubuntu" ]; then
288 PACKAGES="$PACKAGES linux-tools-common"
290 PACKAGES="$PACKAGES linux-tools-common"
289 fi
291 fi
290
292
291 # Monotone only available in older releases.
293 # Monotone only available in older releases.
292 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
294 if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then
293 PACKAGES="$PACKAGES monotone"
295 PACKAGES="$PACKAGES monotone"
294 fi
296 fi
295
297
296 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
298 sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES
297
299
298 # Create clang-format symlink so test harness finds it.
300 # Create clang-format symlink so test harness finds it.
299 sudo update-alternatives --install /usr/bin/clang-format clang-format \
301 sudo update-alternatives --install /usr/bin/clang-format clang-format \
300 /usr/bin/clang-format-6.0 1000
302 /usr/bin/clang-format-6.0 1000
301
303
302 sudo mkdir /hgdev
304 sudo mkdir /hgdev
303 # Will be normalized to hg:hg later.
305 # Will be normalized to hg:hg later.
304 sudo chown `whoami` /hgdev
306 sudo chown `whoami` /hgdev
305
307
306 {install_rust}
308 {install_rust}
307
309
308 cp requirements-py2.txt /hgdev/requirements-py2.txt
310 cp requirements-py2.txt /hgdev/requirements-py2.txt
309 cp requirements-py3.txt /hgdev/requirements-py3.txt
311 cp requirements-py3.txt /hgdev/requirements-py3.txt
310
312
311 # Disable the pip version check because it uses the network and can
313 # Disable the pip version check because it uses the network and can
312 # be annoying.
314 # be annoying.
313 cat << EOF | sudo tee -a /etc/pip.conf
315 cat << EOF | sudo tee -a /etc/pip.conf
314 [global]
316 [global]
315 disable-pip-version-check = True
317 disable-pip-version-check = True
316 EOF
318 EOF
317
319
318 {install_pythons}
320 {install_pythons}
319 {bootstrap_virtualenv}
321 {bootstrap_virtualenv}
320
322
321 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
323 /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src
322
324
323 # Mark the repo as non-publishing.
325 # Mark the repo as non-publishing.
324 cat >> /hgdev/src/.hg/hgrc << EOF
326 cat >> /hgdev/src/.hg/hgrc << EOF
325 [phases]
327 [phases]
326 publish = false
328 publish = false
327 EOF
329 EOF
328
330
329 sudo chown -R hg:hg /hgdev
331 sudo chown -R hg:hg /hgdev
330 '''.lstrip()
332 '''.lstrip()
331 .format(
333 .format(
332 install_rust=INSTALL_RUST,
334 install_rust=INSTALL_RUST,
333 install_pythons=INSTALL_PYTHONS,
335 install_pythons=INSTALL_PYTHONS,
334 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
336 bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
335 )
337 )
336 .replace('\r\n', '\n')
338 .replace('\r\n', '\n')
337 )
339 )
338
340
339
341
340 # Prepares /hgdev for operations.
342 # Prepares /hgdev for operations.
341 PREPARE_HGDEV = '''
343 PREPARE_HGDEV = '''
342 #!/bin/bash
344 #!/bin/bash
343
345
344 set -e
346 set -e
345
347
346 FS=$1
348 FS=$1
347
349
348 ensure_device() {
350 ensure_device() {
349 if [ -z "${DEVICE}" ]; then
351 if [ -z "${DEVICE}" ]; then
350 echo "could not find block device to format"
352 echo "could not find block device to format"
351 exit 1
353 exit 1
352 fi
354 fi
353 }
355 }
354
356
355 # Determine device to partition for extra filesystem.
357 # Determine device to partition for extra filesystem.
356 # If only 1 volume is present, it will be the root volume and
358 # If only 1 volume is present, it will be the root volume and
357 # should be /dev/nvme0. If multiple volumes are present, the
359 # should be /dev/nvme0. If multiple volumes are present, the
358 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
360 # root volume could be nvme0 or nvme1. Use whichever one doesn't have
359 # a partition.
361 # a partition.
360 if [ -e /dev/nvme1n1 ]; then
362 if [ -e /dev/nvme1n1 ]; then
361 if [ -e /dev/nvme0n1p1 ]; then
363 if [ -e /dev/nvme0n1p1 ]; then
362 DEVICE=/dev/nvme1n1
364 DEVICE=/dev/nvme1n1
363 else
365 else
364 DEVICE=/dev/nvme0n1
366 DEVICE=/dev/nvme0n1
365 fi
367 fi
366 else
368 else
367 DEVICE=
369 DEVICE=
368 fi
370 fi
369
371
370 sudo mkdir /hgwork
372 sudo mkdir /hgwork
371
373
372 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
374 if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
373 ensure_device
375 ensure_device
374 echo "creating ${FS} filesystem on ${DEVICE}"
376 echo "creating ${FS} filesystem on ${DEVICE}"
375 fi
377 fi
376
378
377 if [ "${FS}" = "default" ]; then
379 if [ "${FS}" = "default" ]; then
378 :
380 :
379
381
380 elif [ "${FS}" = "btrfs" ]; then
382 elif [ "${FS}" = "btrfs" ]; then
381 sudo mkfs.btrfs ${DEVICE}
383 sudo mkfs.btrfs ${DEVICE}
382 sudo mount ${DEVICE} /hgwork
384 sudo mount ${DEVICE} /hgwork
383
385
384 elif [ "${FS}" = "ext3" ]; then
386 elif [ "${FS}" = "ext3" ]; then
385 # lazy_journal_init speeds up filesystem creation at the expense of
387 # lazy_journal_init speeds up filesystem creation at the expense of
386 # integrity if things crash. We are an ephemeral instance, so we don't
388 # integrity if things crash. We are an ephemeral instance, so we don't
387 # care about integrity.
389 # care about integrity.
388 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
390 sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
389 sudo mount ${DEVICE} /hgwork
391 sudo mount ${DEVICE} /hgwork
390
392
391 elif [ "${FS}" = "ext4" ]; then
393 elif [ "${FS}" = "ext4" ]; then
392 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
394 sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
393 sudo mount ${DEVICE} /hgwork
395 sudo mount ${DEVICE} /hgwork
394
396
395 elif [ "${FS}" = "jfs" ]; then
397 elif [ "${FS}" = "jfs" ]; then
396 sudo mkfs.jfs ${DEVICE}
398 sudo mkfs.jfs ${DEVICE}
397 sudo mount ${DEVICE} /hgwork
399 sudo mount ${DEVICE} /hgwork
398
400
399 elif [ "${FS}" = "tmpfs" ]; then
401 elif [ "${FS}" = "tmpfs" ]; then
400 echo "creating tmpfs volume in /hgwork"
402 echo "creating tmpfs volume in /hgwork"
401 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
403 sudo mount -t tmpfs -o size=1024M tmpfs /hgwork
402
404
403 elif [ "${FS}" = "xfs" ]; then
405 elif [ "${FS}" = "xfs" ]; then
404 sudo mkfs.xfs ${DEVICE}
406 sudo mkfs.xfs ${DEVICE}
405 sudo mount ${DEVICE} /hgwork
407 sudo mount ${DEVICE} /hgwork
406
408
407 else
409 else
408 echo "unsupported filesystem: ${FS}"
410 echo "unsupported filesystem: ${FS}"
409 exit 1
411 exit 1
410 fi
412 fi
411
413
412 echo "/hgwork ready"
414 echo "/hgwork ready"
413
415
414 sudo chown hg:hg /hgwork
416 sudo chown hg:hg /hgwork
415 mkdir /hgwork/tmp
417 mkdir /hgwork/tmp
416 chown hg:hg /hgwork/tmp
418 chown hg:hg /hgwork/tmp
417
419
418 rsync -a /hgdev/src /hgwork/
420 rsync -a /hgdev/src /hgwork/
419 '''.lstrip().replace(
421 '''.lstrip().replace(
420 '\r\n', '\n'
422 '\r\n', '\n'
421 )
423 )
422
424
423
425
424 HG_UPDATE_CLEAN = '''
426 HG_UPDATE_CLEAN = '''
425 set -ex
427 set -ex
426
428
427 HG=/hgdev/venv-bootstrap/bin/hg
429 HG=/hgdev/venv-bootstrap/bin/hg
428
430
429 cd /hgwork/src
431 cd /hgwork/src
430 ${HG} --config extensions.purge= purge --all
432 ${HG} --config extensions.purge= purge --all
431 ${HG} update -C $1
433 ${HG} update -C $1
432 ${HG} log -r .
434 ${HG} log -r .
433 '''.lstrip().replace(
435 '''.lstrip().replace(
434 '\r\n', '\n'
436 '\r\n', '\n'
435 )
437 )
436
438
437
439
438 def prepare_exec_environment(ssh_client, filesystem='default'):
440 def prepare_exec_environment(ssh_client, filesystem='default'):
439 """Prepare an EC2 instance to execute things.
441 """Prepare an EC2 instance to execute things.
440
442
441 The AMI has an ``/hgdev`` bootstrapped with various Python installs
443 The AMI has an ``/hgdev`` bootstrapped with various Python installs
442 and a clone of the Mercurial repo.
444 and a clone of the Mercurial repo.
443
445
444 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
446 In EC2, EBS volumes launched from snapshots have wonky performance behavior.
445 Notably, blocks have to be copied on first access, which makes volume
447 Notably, blocks have to be copied on first access, which makes volume
446 I/O extremely slow on fresh volumes.
448 I/O extremely slow on fresh volumes.
447
449
448 Furthermore, we may want to run operations, tests, etc on alternative
450 Furthermore, we may want to run operations, tests, etc on alternative
449 filesystems so we examine behavior on different filesystems.
451 filesystems so we examine behavior on different filesystems.
450
452
451 This function is used to facilitate executing operations on alternate
453 This function is used to facilitate executing operations on alternate
452 volumes.
454 volumes.
453 """
455 """
454 sftp = ssh_client.open_sftp()
456 sftp = ssh_client.open_sftp()
455
457
456 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
458 with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
457 fh.write(PREPARE_HGDEV)
459 fh.write(PREPARE_HGDEV)
458 fh.chmod(0o0777)
460 fh.chmod(0o0777)
459
461
460 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
462 command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
461 chan, stdin, stdout = exec_command(ssh_client, command)
463 chan, stdin, stdout = exec_command(ssh_client, command)
462 stdin.close()
464 stdin.close()
463
465
464 for line in stdout:
466 for line in stdout:
465 print(line, end='')
467 print(line, end='')
466
468
467 res = chan.recv_exit_status()
469 res = chan.recv_exit_status()
468
470
469 if res:
471 if res:
470 raise Exception('non-0 exit code updating working directory; %d' % res)
472 raise Exception('non-0 exit code updating working directory; %d' % res)
471
473
472
474
473 def synchronize_hg(
475 def synchronize_hg(
474 source_path: pathlib.Path, ec2_instance, revision: str = None
476 source_path: pathlib.Path, ec2_instance, revision: str = None
475 ):
477 ):
476 """Synchronize a local Mercurial source path to remote EC2 instance."""
478 """Synchronize a local Mercurial source path to remote EC2 instance."""
477
479
478 with tempfile.TemporaryDirectory() as temp_dir:
480 with tempfile.TemporaryDirectory() as temp_dir:
479 temp_dir = pathlib.Path(temp_dir)
481 temp_dir = pathlib.Path(temp_dir)
480
482
481 ssh_dir = temp_dir / '.ssh'
483 ssh_dir = temp_dir / '.ssh'
482 ssh_dir.mkdir()
484 ssh_dir.mkdir()
483 ssh_dir.chmod(0o0700)
485 ssh_dir.chmod(0o0700)
484
486
485 public_ip = ec2_instance.public_ip_address
487 public_ip = ec2_instance.public_ip_address
486
488
487 ssh_config = ssh_dir / 'config'
489 ssh_config = ssh_dir / 'config'
488
490
489 with ssh_config.open('w', encoding='utf-8') as fh:
491 with ssh_config.open('w', encoding='utf-8') as fh:
490 fh.write('Host %s\n' % public_ip)
492 fh.write('Host %s\n' % public_ip)
491 fh.write(' User hg\n')
493 fh.write(' User hg\n')
492 fh.write(' StrictHostKeyChecking no\n')
494 fh.write(' StrictHostKeyChecking no\n')
493 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
495 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
494 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
496 fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
495
497
496 if not (source_path / '.hg').is_dir():
498 if not (source_path / '.hg').is_dir():
497 raise Exception(
499 raise Exception(
498 '%s is not a Mercurial repository; synchronization '
500 '%s is not a Mercurial repository; synchronization '
499 'not yet supported' % source_path
501 'not yet supported' % source_path
500 )
502 )
501
503
502 env = dict(os.environ)
504 env = dict(os.environ)
503 env['HGPLAIN'] = '1'
505 env['HGPLAIN'] = '1'
504 env['HGENCODING'] = 'utf-8'
506 env['HGENCODING'] = 'utf-8'
505
507
506 hg_bin = source_path / 'hg'
508 hg_bin = source_path / 'hg'
507
509
508 res = subprocess.run(
510 res = subprocess.run(
509 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
511 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
510 cwd=str(source_path),
512 cwd=str(source_path),
511 env=env,
513 env=env,
512 check=True,
514 check=True,
513 capture_output=True,
515 capture_output=True,
514 )
516 )
515
517
516 full_revision = res.stdout.decode('ascii')
518 full_revision = res.stdout.decode('ascii')
517
519
518 args = [
520 args = [
519 'python2.7',
521 'python2.7',
520 str(hg_bin),
522 str(hg_bin),
521 '--config',
523 '--config',
522 'ui.ssh=ssh -F %s' % ssh_config,
524 'ui.ssh=ssh -F %s' % ssh_config,
523 '--config',
525 '--config',
524 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
526 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
525 # Also ensure .hgtags changes are present so auto version
527 # Also ensure .hgtags changes are present so auto version
526 # calculation works.
528 # calculation works.
527 'push',
529 'push',
528 '-f',
530 '-f',
529 '-r',
531 '-r',
530 full_revision,
532 full_revision,
531 '-r',
533 '-r',
532 'file(.hgtags)',
534 'file(.hgtags)',
533 'ssh://%s//hgwork/src' % public_ip,
535 'ssh://%s//hgwork/src' % public_ip,
534 ]
536 ]
535
537
536 res = subprocess.run(args, cwd=str(source_path), env=env)
538 res = subprocess.run(args, cwd=str(source_path), env=env)
537
539
538 # Allow 1 (no-op) to not trigger error.
540 # Allow 1 (no-op) to not trigger error.
539 if res.returncode not in (0, 1):
541 if res.returncode not in (0, 1):
540 res.check_returncode()
542 res.check_returncode()
541
543
542 # TODO support synchronizing dirty working directory.
544 # TODO support synchronizing dirty working directory.
543
545
544 sftp = ec2_instance.ssh_client.open_sftp()
546 sftp = ec2_instance.ssh_client.open_sftp()
545
547
546 with sftp.open('/hgdev/hgup', 'wb') as fh:
548 with sftp.open('/hgdev/hgup', 'wb') as fh:
547 fh.write(HG_UPDATE_CLEAN)
549 fh.write(HG_UPDATE_CLEAN)
548 fh.chmod(0o0700)
550 fh.chmod(0o0700)
549
551
550 chan, stdin, stdout = exec_command(
552 chan, stdin, stdout = exec_command(
551 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
553 ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
552 )
554 )
553 stdin.close()
555 stdin.close()
554
556
555 for line in stdout:
557 for line in stdout:
556 print(line, end='')
558 print(line, end='')
557
559
558 res = chan.recv_exit_status()
560 res = chan.recv_exit_status()
559
561
560 if res:
562 if res:
561 raise Exception(
563 raise Exception(
562 'non-0 exit code updating working directory; %d' % res
564 'non-0 exit code updating working directory; %d' % res
563 )
565 )
564
566
565
567
566 def run_tests(ssh_client, python_version, test_flags=None):
568 def run_tests(ssh_client, python_version, test_flags=None):
567 """Run tests on a remote Linux machine via an SSH client."""
569 """Run tests on a remote Linux machine via an SSH client."""
568 test_flags = test_flags or []
570 test_flags = test_flags or []
569
571
570 print('running tests')
572 print('running tests')
571
573
572 if python_version == 'system2':
574 if python_version == 'system2':
573 python = '/usr/bin/python2'
575 python = '/usr/bin/python2'
574 elif python_version == 'system3':
576 elif python_version == 'system3':
575 python = '/usr/bin/python3'
577 python = '/usr/bin/python3'
576 elif python_version.startswith('pypy'):
578 elif python_version.startswith('pypy'):
577 python = '/hgdev/pyenv/shims/%s' % python_version
579 python = '/hgdev/pyenv/shims/%s' % python_version
578 else:
580 else:
579 python = '/hgdev/pyenv/shims/python%s' % python_version
581 python = '/hgdev/pyenv/shims/python%s' % python_version
580
582
581 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
583 test_flags = ' '.join(shlex.quote(a) for a in test_flags)
582
584
583 command = (
585 command = (
584 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
586 '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
585 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
587 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
586 )
588 )
587
589
588 chan, stdin, stdout = exec_command(ssh_client, command)
590 chan, stdin, stdout = exec_command(ssh_client, command)
589
591
590 stdin.close()
592 stdin.close()
591
593
592 for line in stdout:
594 for line in stdout:
593 print(line, end='')
595 print(line, end='')
594
596
595 return chan.recv_exit_status()
597 return chan.recv_exit_status()
@@ -1,510 +1,663 b''
1 # windows.py - Automation specific to Windows
1 # windows.py - Automation specific to Windows
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import datetime
10 import datetime
11 import os
11 import os
12 import paramiko
12 import paramiko
13 import pathlib
13 import pathlib
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import tempfile
16 import tempfile
17
17
18 from .pypi import upload as pypi_upload
18 from .pypi import upload as pypi_upload
19 from .winrm import run_powershell
19 from .winrm import run_powershell
20
20
21
21
22 # PowerShell commands to activate a Visual Studio 2008 environment.
22 # PowerShell commands to activate a Visual Studio 2008 environment.
23 # This is essentially a port of vcvarsall.bat to PowerShell.
23 # This is essentially a port of vcvarsall.bat to PowerShell.
24 ACTIVATE_VC9_AMD64 = r'''
24 ACTIVATE_VC9_AMD64 = r'''
25 Write-Output "activating Visual Studio 2008 environment for AMD64"
25 Write-Output "activating Visual Studio 2008 environment for AMD64"
26 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
26 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
27 $Env:VCINSTALLDIR = "${root}\VC\"
27 $Env:VCINSTALLDIR = "${root}\VC\"
28 $Env:WindowsSdkDir = "${root}\WinSDK\"
28 $Env:WindowsSdkDir = "${root}\WinSDK\"
29 $Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
29 $Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
30 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
30 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
31 $Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
31 $Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
32 $Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
32 $Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
33 '''.lstrip()
33 '''.lstrip()
34
34
35 ACTIVATE_VC9_X86 = r'''
35 ACTIVATE_VC9_X86 = r'''
36 Write-Output "activating Visual Studio 2008 environment for x86"
36 Write-Output "activating Visual Studio 2008 environment for x86"
37 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
37 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
38 $Env:VCINSTALLDIR = "${root}\VC\"
38 $Env:VCINSTALLDIR = "${root}\VC\"
39 $Env:WindowsSdkDir = "${root}\WinSDK\"
39 $Env:WindowsSdkDir = "${root}\WinSDK\"
40 $Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
40 $Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
41 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
41 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
42 $Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
42 $Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
43 $Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
43 $Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
44 '''.lstrip()
44 '''.lstrip()
45
45
46 HG_PURGE = r'''
46 HG_PURGE = r'''
47 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
47 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
48 Set-Location C:\hgdev\src
48 Set-Location C:\hgdev\src
49 hg.exe --config extensions.purge= purge --all
49 hg.exe --config extensions.purge= purge --all
50 if ($LASTEXITCODE -ne 0) {
50 if ($LASTEXITCODE -ne 0) {
51 throw "process exited non-0: $LASTEXITCODE"
51 throw "process exited non-0: $LASTEXITCODE"
52 }
52 }
53 Write-Output "purged Mercurial repo"
53 Write-Output "purged Mercurial repo"
54 '''
54 '''
55
55
56 HG_UPDATE_CLEAN = r'''
56 HG_UPDATE_CLEAN = r'''
57 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
57 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
58 Set-Location C:\hgdev\src
58 Set-Location C:\hgdev\src
59 hg.exe --config extensions.purge= purge --all
59 hg.exe --config extensions.purge= purge --all
60 if ($LASTEXITCODE -ne 0) {{
60 if ($LASTEXITCODE -ne 0) {{
61 throw "process exited non-0: $LASTEXITCODE"
61 throw "process exited non-0: $LASTEXITCODE"
62 }}
62 }}
63 hg.exe update -C {revision}
63 hg.exe update -C {revision}
64 if ($LASTEXITCODE -ne 0) {{
64 if ($LASTEXITCODE -ne 0) {{
65 throw "process exited non-0: $LASTEXITCODE"
65 throw "process exited non-0: $LASTEXITCODE"
66 }}
66 }}
67 hg.exe log -r .
67 hg.exe log -r .
68 Write-Output "updated Mercurial working directory to {revision}"
68 Write-Output "updated Mercurial working directory to {revision}"
69 '''.lstrip()
69 '''.lstrip()
70
70
71 BUILD_INNO = r'''
71 BUILD_INNO_PYTHON3 = r'''
72 $Env:RUSTUP_HOME = "C:\hgdev\rustup"
73 $Env:CARGO_HOME = "C:\hgdev\cargo"
74 Set-Location C:\hgdev\src
75 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --pyoxidizer-target {pyoxidizer_target} --version {version}
76 if ($LASTEXITCODE -ne 0) {{
77 throw "process exited non-0: $LASTEXITCODE"
78 }}
79 '''
80
81 BUILD_INNO_PYTHON2 = r'''
72 Set-Location C:\hgdev\src
82 Set-Location C:\hgdev\src
73 $python = "C:\hgdev\python27-{arch}\python.exe"
83 $python = "C:\hgdev\python27-{arch}\python.exe"
74 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python
84 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python {extra_args}
75 if ($LASTEXITCODE -ne 0) {{
85 if ($LASTEXITCODE -ne 0) {{
76 throw "process exited non-0: $LASTEXITCODE"
86 throw "process exited non-0: $LASTEXITCODE"
77 }}
87 }}
78 '''.lstrip()
88 '''.lstrip()
79
89
80 BUILD_WHEEL = r'''
90 BUILD_WHEEL = r'''
81 Set-Location C:\hgdev\src
91 Set-Location C:\hgdev\src
82 C:\hgdev\python27-{arch}\Scripts\pip.exe wheel --wheel-dir dist .
92 C:\hgdev\python{python_version}-{arch}\python.exe -m pip wheel --wheel-dir dist .
83 if ($LASTEXITCODE -ne 0) {{
93 if ($LASTEXITCODE -ne 0) {{
84 throw "process exited non-0: $LASTEXITCODE"
94 throw "process exited non-0: $LASTEXITCODE"
85 }}
95 }}
86 '''
96 '''
87
97
88 BUILD_WIX = r'''
98 BUILD_WIX_PYTHON3 = r'''
99 $Env:RUSTUP_HOME = "C:\hgdev\rustup"
100 $Env:CARGO_HOME = "C:\hgdev\cargo"
101 Set-Location C:\hgdev\src
102 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --pyoxidizer-target {pyoxidizer_target} --version {version}
103 if ($LASTEXITCODE -ne 0) {{
104 throw "process exited non-0: $LASTEXITCODE"
105 }}
106 '''
107
108 BUILD_WIX_PYTHON2 = r'''
89 Set-Location C:\hgdev\src
109 Set-Location C:\hgdev\src
90 $python = "C:\hgdev\python27-{arch}\python.exe"
110 $python = "C:\hgdev\python27-{arch}\python.exe"
91 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
111 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
92 if ($LASTEXITCODE -ne 0) {{
112 if ($LASTEXITCODE -ne 0) {{
93 throw "process exited non-0: $LASTEXITCODE"
113 throw "process exited non-0: $LASTEXITCODE"
94 }}
114 }}
95 '''
115 '''
96
116
97 RUN_TESTS = r'''
117 RUN_TESTS = r'''
98 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
118 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
99 if ($LASTEXITCODE -ne 0) {{
119 if ($LASTEXITCODE -ne 0) {{
100 throw "process exited non-0: $LASTEXITCODE"
120 throw "process exited non-0: $LASTEXITCODE"
101 }}
121 }}
102 '''
122 '''
103
123
104 X86_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win32.whl'
124 WHEEL_FILENAME_PYTHON27_X86 = 'mercurial-{version}-cp27-cp27m-win32.whl'
105 X64_WHEEL_FILENAME = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
125 WHEEL_FILENAME_PYTHON27_X64 = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
106 X86_EXE_FILENAME = 'Mercurial-{version}.exe'
126 WHEEL_FILENAME_PYTHON37_X86 = 'mercurial-{version}-cp37-cp37m-win32.whl'
107 X64_EXE_FILENAME = 'Mercurial-{version}-x64.exe'
127 WHEEL_FILENAME_PYTHON37_X64 = 'mercurial-{version}-cp37-cp37m-win_amd64.whl'
108 X86_MSI_FILENAME = 'mercurial-{version}-x86.msi'
128 WHEEL_FILENAME_PYTHON38_X86 = 'mercurial-{version}-cp38-cp38-win32.whl'
109 X64_MSI_FILENAME = 'mercurial-{version}-x64.msi'
129 WHEEL_FILENAME_PYTHON38_X64 = 'mercurial-{version}-cp38-cp38-win_amd64.whl'
130
131 EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
132 EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
133 EXE_FILENAME_PYTHON3_X86 = 'Mercurial-{version}-x86.exe'
134 EXE_FILENAME_PYTHON3_X64 = 'Mercurial-{version}-x64.exe'
135
136 MSI_FILENAME_PYTHON2_X86 = 'mercurial-{version}-x86-python2.msi'
137 MSI_FILENAME_PYTHON2_X64 = 'mercurial-{version}-x64-python2.msi'
138 MSI_FILENAME_PYTHON3_X86 = 'mercurial-{version}-x86.msi'
139 MSI_FILENAME_PYTHON3_X64 = 'mercurial-{version}-x64.msi'
110
140
111 MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'
141 MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'
112
142
113 X86_USER_AGENT_PATTERN = '.*Windows.*'
143 X86_USER_AGENT_PATTERN = '.*Windows.*'
114 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
144 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
115
145
116 X86_EXE_DESCRIPTION = (
146 EXE_PYTHON2_X86_DESCRIPTION = (
117 'Mercurial {version} Inno Setup installer - x86 Windows '
147 'Mercurial {version} Inno Setup installer - x86 Windows (Python 2) '
148 '- does not require admin rights'
149 )
150 EXE_PYTHON2_X64_DESCRIPTION = (
151 'Mercurial {version} Inno Setup installer - x64 Windows (Python 2) '
152 '- does not require admin rights'
153 )
154 # TODO remove Python version once Python 2 is dropped.
155 EXE_PYTHON3_X86_DESCRIPTION = (
156 'Mercurial {version} Inno Setup installer - x86 Windows (Python 3) '
118 '- does not require admin rights'
157 '- does not require admin rights'
119 )
158 )
120 X64_EXE_DESCRIPTION = (
159 EXE_PYTHON3_X64_DESCRIPTION = (
121 'Mercurial {version} Inno Setup installer - x64 Windows '
160 'Mercurial {version} Inno Setup installer - x64 Windows (Python 3) '
122 '- does not require admin rights'
161 '- does not require admin rights'
123 )
162 )
124 X86_MSI_DESCRIPTION = (
163 MSI_PYTHON2_X86_DESCRIPTION = (
125 'Mercurial {version} MSI installer - x86 Windows ' '- requires admin rights'
164 'Mercurial {version} MSI installer - x86 Windows (Python 2) '
165 '- requires admin rights'
166 )
167 MSI_PYTHON2_X64_DESCRIPTION = (
168 'Mercurial {version} MSI installer - x64 Windows (Python 2) '
169 '- requires admin rights'
126 )
170 )
127 X64_MSI_DESCRIPTION = (
171 MSI_PYTHON3_X86_DESCRIPTION = (
128 'Mercurial {version} MSI installer - x64 Windows ' '- requires admin rights'
172 'Mercurial {version} MSI installer - x86 Windows (Python 3) '
173 '- requires admin rights'
174 )
175 MSI_PYTHON3_X64_DESCRIPTION = (
176 'Mercurial {version} MSI installer - x64 Windows (Python 3) '
177 '- requires admin rights'
129 )
178 )
130
179
131
180
132 def get_vc_prefix(arch):
181 def get_vc_prefix(arch):
133 if arch == 'x86':
182 if arch == 'x86':
134 return ACTIVATE_VC9_X86
183 return ACTIVATE_VC9_X86
135 elif arch == 'x64':
184 elif arch == 'x64':
136 return ACTIVATE_VC9_AMD64
185 return ACTIVATE_VC9_AMD64
137 else:
186 else:
138 raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
187 raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
139
188
140
189
141 def fix_authorized_keys_permissions(winrm_client, path):
190 def fix_authorized_keys_permissions(winrm_client, path):
142 commands = [
191 commands = [
143 '$ErrorActionPreference = "Stop"',
192 '$ErrorActionPreference = "Stop"',
144 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
193 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
145 r'icacls %s /remove:g "NT Service\sshd"' % path,
194 r'icacls %s /remove:g "NT Service\sshd"' % path,
146 ]
195 ]
147
196
148 run_powershell(winrm_client, '\n'.join(commands))
197 run_powershell(winrm_client, '\n'.join(commands))
149
198
150
199
151 def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
200 def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
152 """Synchronize local Mercurial repo to remote EC2 instance."""
201 """Synchronize local Mercurial repo to remote EC2 instance."""
153
202
154 winrm_client = ec2_instance.winrm_client
203 winrm_client = ec2_instance.winrm_client
155
204
156 with tempfile.TemporaryDirectory() as temp_dir:
205 with tempfile.TemporaryDirectory() as temp_dir:
157 temp_dir = pathlib.Path(temp_dir)
206 temp_dir = pathlib.Path(temp_dir)
158
207
159 ssh_dir = temp_dir / '.ssh'
208 ssh_dir = temp_dir / '.ssh'
160 ssh_dir.mkdir()
209 ssh_dir.mkdir()
161 ssh_dir.chmod(0o0700)
210 ssh_dir.chmod(0o0700)
162
211
163 # Generate SSH key to use for communication.
212 # Generate SSH key to use for communication.
164 subprocess.run(
213 subprocess.run(
165 [
214 [
166 'ssh-keygen',
215 'ssh-keygen',
167 '-t',
216 '-t',
168 'rsa',
217 'rsa',
169 '-b',
218 '-b',
170 '4096',
219 '4096',
171 '-N',
220 '-N',
172 '',
221 '',
173 '-f',
222 '-f',
174 str(ssh_dir / 'id_rsa'),
223 str(ssh_dir / 'id_rsa'),
175 ],
224 ],
176 check=True,
225 check=True,
177 capture_output=True,
226 capture_output=True,
178 )
227 )
179
228
180 # Add it to ~/.ssh/authorized_keys on remote.
229 # Add it to ~/.ssh/authorized_keys on remote.
181 # This assumes the file doesn't already exist.
230 # This assumes the file doesn't already exist.
182 authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
231 authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
183 winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
232 winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
184 winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
233 winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
185 fix_authorized_keys_permissions(winrm_client, authorized_keys)
234 fix_authorized_keys_permissions(winrm_client, authorized_keys)
186
235
187 public_ip = ec2_instance.public_ip_address
236 public_ip = ec2_instance.public_ip_address
188
237
189 ssh_config = temp_dir / '.ssh' / 'config'
238 ssh_config = temp_dir / '.ssh' / 'config'
190
239
191 with open(ssh_config, 'w', encoding='utf-8') as fh:
240 with open(ssh_config, 'w', encoding='utf-8') as fh:
192 fh.write('Host %s\n' % public_ip)
241 fh.write('Host %s\n' % public_ip)
193 fh.write(' User Administrator\n')
242 fh.write(' User Administrator\n')
194 fh.write(' StrictHostKeyChecking no\n')
243 fh.write(' StrictHostKeyChecking no\n')
195 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
244 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
196 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
245 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
197
246
198 if not (hg_repo / '.hg').is_dir():
247 if not (hg_repo / '.hg').is_dir():
199 raise Exception(
248 raise Exception(
200 '%s is not a Mercurial repository; '
249 '%s is not a Mercurial repository; '
201 'synchronization not yet supported' % hg_repo
250 'synchronization not yet supported' % hg_repo
202 )
251 )
203
252
204 env = dict(os.environ)
253 env = dict(os.environ)
205 env['HGPLAIN'] = '1'
254 env['HGPLAIN'] = '1'
206 env['HGENCODING'] = 'utf-8'
255 env['HGENCODING'] = 'utf-8'
207
256
208 hg_bin = hg_repo / 'hg'
257 hg_bin = hg_repo / 'hg'
209
258
210 res = subprocess.run(
259 res = subprocess.run(
211 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
260 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
212 cwd=str(hg_repo),
261 cwd=str(hg_repo),
213 env=env,
262 env=env,
214 check=True,
263 check=True,
215 capture_output=True,
264 capture_output=True,
216 )
265 )
217
266
218 full_revision = res.stdout.decode('ascii')
267 full_revision = res.stdout.decode('ascii')
219
268
220 args = [
269 args = [
221 'python2.7',
270 'python2.7',
222 hg_bin,
271 hg_bin,
223 '--config',
272 '--config',
224 'ui.ssh=ssh -F %s' % ssh_config,
273 'ui.ssh=ssh -F %s' % ssh_config,
225 '--config',
274 '--config',
226 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
275 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
227 # Also ensure .hgtags changes are present so auto version
276 # Also ensure .hgtags changes are present so auto version
228 # calculation works.
277 # calculation works.
229 'push',
278 'push',
230 '-f',
279 '-f',
231 '-r',
280 '-r',
232 full_revision,
281 full_revision,
233 '-r',
282 '-r',
234 'file(.hgtags)',
283 'file(.hgtags)',
235 'ssh://%s/c:/hgdev/src' % public_ip,
284 'ssh://%s/c:/hgdev/src' % public_ip,
236 ]
285 ]
237
286
238 res = subprocess.run(args, cwd=str(hg_repo), env=env)
287 res = subprocess.run(args, cwd=str(hg_repo), env=env)
239
288
240 # Allow 1 (no-op) to not trigger error.
289 # Allow 1 (no-op) to not trigger error.
241 if res.returncode not in (0, 1):
290 if res.returncode not in (0, 1):
242 res.check_returncode()
291 res.check_returncode()
243
292
244 run_powershell(
293 run_powershell(
245 winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
294 winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
246 )
295 )
247
296
248 # TODO detect dirty local working directory and synchronize accordingly.
297 # TODO detect dirty local working directory and synchronize accordingly.
249
298
250
299
251 def purge_hg(winrm_client):
300 def purge_hg(winrm_client):
252 """Purge the Mercurial source repository on an EC2 instance."""
301 """Purge the Mercurial source repository on an EC2 instance."""
253 run_powershell(winrm_client, HG_PURGE)
302 run_powershell(winrm_client, HG_PURGE)
254
303
255
304
256 def find_latest_dist(winrm_client, pattern):
305 def find_latest_dist(winrm_client, pattern):
257 """Find path to newest file in dist/ directory matching a pattern."""
306 """Find path to newest file in dist/ directory matching a pattern."""
258
307
259 res = winrm_client.execute_ps(
308 res = winrm_client.execute_ps(
260 r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
309 r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
261 '| Sort-Object LastWriteTime -Descending '
310 '| Sort-Object LastWriteTime -Descending '
262 '| Select-Object -First 1\n'
311 '| Select-Object -First 1\n'
263 '$v.name' % pattern
312 '$v.name' % pattern
264 )
313 )
265 return res[0]
314 return res[0]
266
315
267
316
268 def copy_latest_dist(winrm_client, pattern, dest_path):
317 def copy_latest_dist(winrm_client, pattern, dest_path):
269 """Copy latest file matching pattern in dist/ directory.
318 """Copy latest file matching pattern in dist/ directory.
270
319
271 Given a WinRM client and a file pattern, find the latest file on the remote
320 Given a WinRM client and a file pattern, find the latest file on the remote
272 matching that pattern and copy it to the ``dest_path`` directory on the
321 matching that pattern and copy it to the ``dest_path`` directory on the
273 local machine.
322 local machine.
274 """
323 """
275 latest = find_latest_dist(winrm_client, pattern)
324 latest = find_latest_dist(winrm_client, pattern)
276 source = r'C:\hgdev\src\dist\%s' % latest
325 source = r'C:\hgdev\src\dist\%s' % latest
277 dest = dest_path / latest
326 dest = dest_path / latest
278 print('copying %s to %s' % (source, dest))
327 print('copying %s to %s' % (source, dest))
279 winrm_client.fetch(source, str(dest))
328 winrm_client.fetch(source, str(dest))
280
329
281
330
282 def build_inno_installer(
331 def build_inno_installer(
283 winrm_client, arch: str, dest_path: pathlib.Path, version=None
332 winrm_client,
333 python_version: int,
334 arch: str,
335 dest_path: pathlib.Path,
336 version=None,
284 ):
337 ):
285 """Build the Inno Setup installer on a remote machine.
338 """Build the Inno Setup installer on a remote machine.
286
339
287 Using a WinRM client, remote commands are executed to build
340 Using a WinRM client, remote commands are executed to build
288 a Mercurial Inno Setup installer.
341 a Mercurial Inno Setup installer.
289 """
342 """
290 print('building Inno Setup installer for %s' % arch)
343 print(
344 'building Inno Setup installer for Python %d %s'
345 % (python_version, arch)
346 )
347
348 if python_version == 3:
349 # TODO fix this limitation in packaging code
350 if not version:
351 raise Exception(
352 "version string is required when building for Python 3"
353 )
291
354
292 extra_args = []
355 if arch == "x86":
293 if version:
356 target_triple = "i686-pc-windows-msvc"
294 extra_args.extend(['--version', version])
357 elif arch == "x64":
358 target_triple = "x86_64-pc-windows-msvc"
359 else:
360 raise Exception("unhandled arch: %s" % arch)
295
361
296 ps = get_vc_prefix(arch) + BUILD_INNO.format(
362 ps = BUILD_INNO_PYTHON3.format(
297 arch=arch, extra_args=' '.join(extra_args)
363 pyoxidizer_target=target_triple, version=version,
298 )
364 )
365 else:
366 extra_args = []
367 if version:
368 extra_args.extend(['--version', version])
369
370 ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
371 arch=arch, extra_args=' '.join(extra_args)
372 )
373
299 run_powershell(winrm_client, ps)
374 run_powershell(winrm_client, ps)
300 copy_latest_dist(winrm_client, '*.exe', dest_path)
375 copy_latest_dist(winrm_client, '*.exe', dest_path)
301
376
302
377
303 def build_wheel(winrm_client, arch: str, dest_path: pathlib.Path):
378 def build_wheel(
379 winrm_client, python_version: str, arch: str, dest_path: pathlib.Path
380 ):
304 """Build Python wheels on a remote machine.
381 """Build Python wheels on a remote machine.
305
382
306 Using a WinRM client, remote commands are executed to build a Python wheel
383 Using a WinRM client, remote commands are executed to build a Python wheel
307 for Mercurial.
384 for Mercurial.
308 """
385 """
309 print('Building Windows wheel for %s' % arch)
386 print('Building Windows wheel for Python %s %s' % (python_version, arch))
310 ps = get_vc_prefix(arch) + BUILD_WHEEL.format(arch=arch)
387
388 ps = BUILD_WHEEL.format(
389 python_version=python_version.replace(".", ""), arch=arch
390 )
391
392 # Python 2.7 requires an activated environment.
393 if python_version == "2.7":
394 ps = get_vc_prefix(arch) + ps
395
311 run_powershell(winrm_client, ps)
396 run_powershell(winrm_client, ps)
312 copy_latest_dist(winrm_client, '*.whl', dest_path)
397 copy_latest_dist(winrm_client, '*.whl', dest_path)
313
398
314
399
315 def build_wix_installer(
400 def build_wix_installer(
316 winrm_client, arch: str, dest_path: pathlib.Path, version=None
401 winrm_client,
402 python_version: int,
403 arch: str,
404 dest_path: pathlib.Path,
405 version=None,
317 ):
406 ):
318 """Build the WiX installer on a remote machine.
407 """Build the WiX installer on a remote machine.
319
408
320 Using a WinRM client, remote commands are executed to build a WiX installer.
409 Using a WinRM client, remote commands are executed to build a WiX installer.
321 """
410 """
322 print('Building WiX installer for %s' % arch)
411 print('Building WiX installer for Python %d %s' % (python_version, arch))
323 extra_args = []
412
324 if version:
413 if python_version == 3:
325 extra_args.extend(['--version', version])
414 # TODO fix this limitation in packaging code
415 if not version:
416 raise Exception(
417 "version string is required when building for Python 3"
418 )
326
419
327 ps = get_vc_prefix(arch) + BUILD_WIX.format(
420 if arch == "x86":
328 arch=arch, extra_args=' '.join(extra_args)
421 target_triple = "i686-pc-windows-msvc"
329 )
422 elif arch == "x64":
423 target_triple = "x86_64-pc-windows-msvc"
424 else:
425 raise Exception("unhandled arch: %s" % arch)
426
427 ps = BUILD_WIX_PYTHON3.format(
428 pyoxidizer_target=target_triple, version=version,
429 )
430 else:
431 extra_args = []
432 if version:
433 extra_args.extend(['--version', version])
434
435 ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
436 arch=arch, extra_args=' '.join(extra_args)
437 )
438
330 run_powershell(winrm_client, ps)
439 run_powershell(winrm_client, ps)
331 copy_latest_dist(winrm_client, '*.msi', dest_path)
440 copy_latest_dist(winrm_client, '*.msi', dest_path)
332
441
333
442
334 def run_tests(winrm_client, python_version, arch, test_flags=''):
443 def run_tests(winrm_client, python_version, arch, test_flags=''):
335 """Run tests on a remote Windows machine.
444 """Run tests on a remote Windows machine.
336
445
337 ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``.
446 ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``.
338 ``arch`` is ``x86`` or ``x64``.
447 ``arch`` is ``x86`` or ``x64``.
339 ``test_flags`` is a str representing extra arguments to pass to
448 ``test_flags`` is a str representing extra arguments to pass to
340 ``run-tests.py``.
449 ``run-tests.py``.
341 """
450 """
342 if not re.match(r'\d\.\d', python_version):
451 if not re.match(r'\d\.\d', python_version):
343 raise ValueError(
452 raise ValueError(
344 r'python_version must be \d.\d; got %s' % python_version
453 r'python_version must be \d.\d; got %s' % python_version
345 )
454 )
346
455
347 if arch not in ('x86', 'x64'):
456 if arch not in ('x86', 'x64'):
348 raise ValueError('arch must be x86 or x64; got %s' % arch)
457 raise ValueError('arch must be x86 or x64; got %s' % arch)
349
458
350 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
459 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
351
460
352 ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
461 ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
353
462
354 run_powershell(winrm_client, ps)
463 run_powershell(winrm_client, ps)
355
464
356
465
357 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
466 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
358 return (
467 return (
359 dist_path / X86_WHEEL_FILENAME.format(version=version),
468 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
360 dist_path / X64_WHEEL_FILENAME.format(version=version),
469 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
470 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
471 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
472 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
473 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
361 )
474 )
362
475
363
476
364 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
477 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
365 return (
478 return (
366 dist_path / X86_WHEEL_FILENAME.format(version=version),
479 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
367 dist_path / X64_WHEEL_FILENAME.format(version=version),
480 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
368 dist_path / X86_EXE_FILENAME.format(version=version),
481 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
369 dist_path / X64_EXE_FILENAME.format(version=version),
482 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
370 dist_path / X86_MSI_FILENAME.format(version=version),
483 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
371 dist_path / X64_MSI_FILENAME.format(version=version),
484 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
485 dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
486 dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
487 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
488 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
489 dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
490 dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
491 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
492 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
372 )
493 )
373
494
374
495
375 def generate_latest_dat(version: str):
496 def generate_latest_dat(version: str):
376 x86_exe_filename = X86_EXE_FILENAME.format(version=version)
497 python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
377 x64_exe_filename = X64_EXE_FILENAME.format(version=version)
498 python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
378 x86_msi_filename = X86_MSI_FILENAME.format(version=version)
499 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
379 x64_msi_filename = X64_MSI_FILENAME.format(version=version)
500 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
501 python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
502 python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
503 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
504 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
380
505
381 entries = (
506 entries = (
382 (
507 (
383 '10',
508 '10',
384 version,
509 version,
385 X86_USER_AGENT_PATTERN,
510 X86_USER_AGENT_PATTERN,
386 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_exe_filename),
511 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_exe_filename),
387 X86_EXE_DESCRIPTION.format(version=version),
512 EXE_PYTHON3_X86_DESCRIPTION.format(version=version),
388 ),
513 ),
389 (
514 (
390 '10',
515 '10',
391 version,
516 version,
392 X64_USER_AGENT_PATTERN,
517 X64_USER_AGENT_PATTERN,
393 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_exe_filename),
518 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_exe_filename),
394 X64_EXE_DESCRIPTION.format(version=version),
519 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
520 ),
521 (
522 '9',
523 version,
524 X86_USER_AGENT_PATTERN,
525 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
526 EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
527 ),
528 (
529 '9',
530 version,
531 X64_USER_AGENT_PATTERN,
532 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
533 EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
395 ),
534 ),
396 (
535 (
397 '10',
536 '10',
398 version,
537 version,
399 X86_USER_AGENT_PATTERN,
538 X86_USER_AGENT_PATTERN,
400 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x86_msi_filename),
539 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_msi_filename),
401 X86_MSI_DESCRIPTION.format(version=version),
540 MSI_PYTHON3_X86_DESCRIPTION.format(version=version),
402 ),
541 ),
403 (
542 (
404 '10',
543 '10',
405 version,
544 version,
406 X64_USER_AGENT_PATTERN,
545 X64_USER_AGENT_PATTERN,
407 '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_msi_filename),
546 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
408 X64_MSI_DESCRIPTION.format(version=version),
547 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
548 ),
549 (
550 '9',
551 version,
552 X86_USER_AGENT_PATTERN,
553 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
554 MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
555 ),
556 (
557 '9',
558 version,
559 X64_USER_AGENT_PATTERN,
560 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
561 MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
409 ),
562 ),
410 )
563 )
411
564
412 lines = ['\t'.join(e) for e in entries]
565 lines = ['\t'.join(e) for e in entries]
413
566
414 return '\n'.join(lines) + '\n'
567 return '\n'.join(lines) + '\n'
415
568
416
569
417 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
570 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
418 """Publish Windows release artifacts to PyPI."""
571 """Publish Windows release artifacts to PyPI."""
419
572
420 wheel_paths = resolve_wheel_artifacts(dist_path, version)
573 wheel_paths = resolve_wheel_artifacts(dist_path, version)
421
574
422 for p in wheel_paths:
575 for p in wheel_paths:
423 if not p.exists():
576 if not p.exists():
424 raise Exception('%s not found' % p)
577 raise Exception('%s not found' % p)
425
578
426 print('uploading wheels to PyPI (you may be prompted for credentials)')
579 print('uploading wheels to PyPI (you may be prompted for credentials)')
427 pypi_upload(wheel_paths)
580 pypi_upload(wheel_paths)
428
581
429
582
430 def publish_artifacts_mercurial_scm_org(
583 def publish_artifacts_mercurial_scm_org(
431 dist_path: pathlib.Path, version: str, ssh_username=None
584 dist_path: pathlib.Path, version: str, ssh_username=None
432 ):
585 ):
433 """Publish Windows release artifacts to mercurial-scm.org."""
586 """Publish Windows release artifacts to mercurial-scm.org."""
434 all_paths = resolve_all_artifacts(dist_path, version)
587 all_paths = resolve_all_artifacts(dist_path, version)
435
588
436 for p in all_paths:
589 for p in all_paths:
437 if not p.exists():
590 if not p.exists():
438 raise Exception('%s not found' % p)
591 raise Exception('%s not found' % p)
439
592
440 client = paramiko.SSHClient()
593 client = paramiko.SSHClient()
441 client.load_system_host_keys()
594 client.load_system_host_keys()
442 # We assume the system SSH configuration knows how to connect.
595 # We assume the system SSH configuration knows how to connect.
443 print('connecting to mercurial-scm.org via ssh...')
596 print('connecting to mercurial-scm.org via ssh...')
444 try:
597 try:
445 client.connect('mercurial-scm.org', username=ssh_username)
598 client.connect('mercurial-scm.org', username=ssh_username)
446 except paramiko.AuthenticationException:
599 except paramiko.AuthenticationException:
447 print('error authenticating; is an SSH key available in an SSH agent?')
600 print('error authenticating; is an SSH key available in an SSH agent?')
448 raise
601 raise
449
602
450 print('SSH connection established')
603 print('SSH connection established')
451
604
452 print('opening SFTP client...')
605 print('opening SFTP client...')
453 sftp = client.open_sftp()
606 sftp = client.open_sftp()
454 print('SFTP client obtained')
607 print('SFTP client obtained')
455
608
456 for p in all_paths:
609 for p in all_paths:
457 dest_path = '/var/www/release/windows/%s' % p.name
610 dest_path = '/var/www/release/windows/%s' % p.name
458 print('uploading %s to %s' % (p, dest_path))
611 print('uploading %s to %s' % (p, dest_path))
459
612
460 with p.open('rb') as fh:
613 with p.open('rb') as fh:
461 data = fh.read()
614 data = fh.read()
462
615
463 with sftp.open(dest_path, 'wb') as fh:
616 with sftp.open(dest_path, 'wb') as fh:
464 fh.write(data)
617 fh.write(data)
465 fh.chmod(0o0664)
618 fh.chmod(0o0664)
466
619
467 latest_dat_path = '/var/www/release/windows/latest.dat'
620 latest_dat_path = '/var/www/release/windows/latest.dat'
468
621
469 now = datetime.datetime.utcnow()
622 now = datetime.datetime.utcnow()
470 backup_path = dist_path / (
623 backup_path = dist_path / (
471 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
624 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
472 )
625 )
473 print('backing up %s to %s' % (latest_dat_path, backup_path))
626 print('backing up %s to %s' % (latest_dat_path, backup_path))
474
627
475 with sftp.open(latest_dat_path, 'rb') as fh:
628 with sftp.open(latest_dat_path, 'rb') as fh:
476 latest_dat_old = fh.read()
629 latest_dat_old = fh.read()
477
630
478 with backup_path.open('wb') as fh:
631 with backup_path.open('wb') as fh:
479 fh.write(latest_dat_old)
632 fh.write(latest_dat_old)
480
633
481 print('writing %s with content:' % latest_dat_path)
634 print('writing %s with content:' % latest_dat_path)
482 latest_dat_content = generate_latest_dat(version)
635 latest_dat_content = generate_latest_dat(version)
483 print(latest_dat_content)
636 print(latest_dat_content)
484
637
485 with sftp.open(latest_dat_path, 'wb') as fh:
638 with sftp.open(latest_dat_path, 'wb') as fh:
486 fh.write(latest_dat_content.encode('ascii'))
639 fh.write(latest_dat_content.encode('ascii'))
487
640
488
641
489 def publish_artifacts(
642 def publish_artifacts(
490 dist_path: pathlib.Path,
643 dist_path: pathlib.Path,
491 version: str,
644 version: str,
492 pypi=True,
645 pypi=True,
493 mercurial_scm_org=True,
646 mercurial_scm_org=True,
494 ssh_username=None,
647 ssh_username=None,
495 ):
648 ):
496 """Publish Windows release artifacts.
649 """Publish Windows release artifacts.
497
650
498 Files are found in `dist_path`. We will look for files with version string
651 Files are found in `dist_path`. We will look for files with version string
499 `version`.
652 `version`.
500
653
501 `pypi` controls whether we upload to PyPI.
654 `pypi` controls whether we upload to PyPI.
502 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
655 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
503 """
656 """
504 if pypi:
657 if pypi:
505 publish_artifacts_pypi(dist_path, version)
658 publish_artifacts_pypi(dist_path, version)
506
659
507 if mercurial_scm_org:
660 if mercurial_scm_org:
508 publish_artifacts_mercurial_scm_org(
661 publish_artifacts_mercurial_scm_org(
509 dist_path, version, ssh_username=ssh_username
662 dist_path, version, ssh_username=ssh_username
510 )
663 )
@@ -1,200 +1,220 b''
1 # install-dependencies.ps1 - Install Windows dependencies for building Mercurial
1 # install-dependencies.ps1 - Install Windows dependencies for building Mercurial
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This script can be used to bootstrap a Mercurial build environment on
8 # This script can be used to bootstrap a Mercurial build environment on
9 # Windows.
9 # Windows.
10 #
10 #
11 # The script makes a lot of assumptions about how things should work.
11 # The script makes a lot of assumptions about how things should work.
12 # For example, the install location of Python is hardcoded to c:\hgdev\*.
12 # For example, the install location of Python is hardcoded to c:\hgdev\*.
13 #
13 #
14 # The script should be executed from a PowerShell with elevated privileges
14 # The script should be executed from a PowerShell with elevated privileges
15 # if you don't want to see a UAC prompt for various installers.
15 # if you don't want to see a UAC prompt for various installers.
16 #
16 #
17 # The script is tested on Windows 10 and Windows Server 2019 (in EC2).
17 # The script is tested on Windows 10 and Windows Server 2019 (in EC2).
18
18
19 $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe"
19 $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe"
20 $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139"
20 $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139"
21
21
22 $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi"
22 $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi"
23 $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf"
23 $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf"
24
24
25 $PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi"
25 $PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi"
26 $PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05"
26 $PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05"
27 $PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi"
27 $PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi"
28 $PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9"
28 $PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9"
29
29
30 $PYTHON35_x86_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe"
30 $PYTHON35_x86_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe"
31 $PYTHON35_x86_SHA256 = "F27C2D67FD9688E4970F3BFF799BB9D722A0D6C2C13B04848E1F7D620B524B0E"
31 $PYTHON35_x86_SHA256 = "F27C2D67FD9688E4970F3BFF799BB9D722A0D6C2C13B04848E1F7D620B524B0E"
32 $PYTHON35_x64_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe"
32 $PYTHON35_x64_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe"
33 $PYTHON35_x64_SHA256 = "9B7741CC32357573A77D2EE64987717E527628C38FD7EAF3E2AACA853D45A1EE"
33 $PYTHON35_x64_SHA256 = "9B7741CC32357573A77D2EE64987717E527628C38FD7EAF3E2AACA853D45A1EE"
34
34
35 $PYTHON36_x86_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8.exe"
35 $PYTHON36_x86_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8.exe"
36 $PYTHON36_x86_SHA256 = "89871D432BC06E4630D7B64CB1A8451E53C80E68DE29029976B12AAD7DBFA5A0"
36 $PYTHON36_x86_SHA256 = "89871D432BC06E4630D7B64CB1A8451E53C80E68DE29029976B12AAD7DBFA5A0"
37 $PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
37 $PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
38 $PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC"
38 $PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC"
39
39
40 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7.exe"
40 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7.exe"
41 $PYTHON37_x86_SHA256 = "27fbffcd342d5055acc64050db4c35d0025661521e642b59c381dcba2e162c6a"
41 $PYTHON37_x86_SHA256 = "27fbffcd342d5055acc64050db4c35d0025661521e642b59c381dcba2e162c6a"
42 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7-amd64.exe"
42 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7-amd64.exe"
43 $PYTHON37_x64_SHA256 = "1a0368663ceff999d865de955992b6ea3cb0c8cb15a1a296a8eb7df19cc59e69"
43 $PYTHON37_x64_SHA256 = "1a0368663ceff999d865de955992b6ea3cb0c8cb15a1a296a8eb7df19cc59e69"
44
44
45 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2.exe"
45 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2.exe"
46 $PYTHON38_x86_SHA256 = "03ac5754a69c9c11c08d1f4d694c14625a4d27348ad4dd2d1253e2547819db2c"
46 $PYTHON38_x86_SHA256 = "03ac5754a69c9c11c08d1f4d694c14625a4d27348ad4dd2d1253e2547819db2c"
47 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2-amd64.exe"
47 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2-amd64.exe"
48 $PYTHON38_x64_SHA256 = "8e400e3f32cdcb746e62e0db4d3ae4cba1f927141ebc4d0d5a4006b0daee8921"
48 $PYTHON38_x64_SHA256 = "8e400e3f32cdcb746e62e0db4d3ae4cba1f927141ebc4d0d5a4006b0daee8921"
49
49
50 # PIP 19.2.3.
50 # PIP 19.2.3.
51 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
51 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
52 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
52 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
53
53
54 $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz"
54 $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz"
55 $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2"
55 $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2"
56
56
57 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
57 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
58 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
58 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
59
59
60 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
60 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
61 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
61 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
62
62
63 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl"
63 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl"
64 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME"
64 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME"
65 $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f"
65 $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f"
66
66
67 $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe"
68 $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72"
69
67 # Writing progress slows down downloads substantially. So disable it.
70 # Writing progress slows down downloads substantially. So disable it.
68 $progressPreference = 'silentlyContinue'
71 $progressPreference = 'silentlyContinue'
69
72
70 function Secure-Download($url, $path, $sha256) {
73 function Secure-Download($url, $path, $sha256) {
71 if (Test-Path -Path $path) {
74 if (Test-Path -Path $path) {
72 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
75 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
73
76
74 if ($hash.Hash -eq $sha256) {
77 if ($hash.Hash -eq $sha256) {
75 Write-Output "SHA256 of $path verified as $sha256"
78 Write-Output "SHA256 of $path verified as $sha256"
76 return
79 return
77 }
80 }
78
81
79 Write-Output "hash mismatch on $path; downloading again"
82 Write-Output "hash mismatch on $path; downloading again"
80 }
83 }
81
84
82 Write-Output "downloading $url to $path"
85 Write-Output "downloading $url to $path"
83 Invoke-WebRequest -Uri $url -OutFile $path
86 Invoke-WebRequest -Uri $url -OutFile $path
84 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
87 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
85
88
86 if ($hash.Hash -ne $sha256) {
89 if ($hash.Hash -ne $sha256) {
87 Remove-Item -Path $path
90 Remove-Item -Path $path
88 throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256"
91 throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256"
89 }
92 }
90 }
93 }
91
94
92 function Invoke-Process($path, $arguments) {
95 function Invoke-Process($path, $arguments) {
93 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
96 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
94
97
95 if ($p.ExitCode -ne 0) {
98 if ($p.ExitCode -ne 0) {
96 throw "process exited non-0: $($p.ExitCode)"
99 throw "process exited non-0: $($p.ExitCode)"
97 }
100 }
98 }
101 }
99
102
100 function Install-Python3($name, $installer, $dest, $pip) {
103 function Install-Python3($name, $installer, $dest, $pip) {
101 Write-Output "installing $name"
104 Write-Output "installing $name"
102
105
103 # We hit this when running the script as part of Simple Systems Manager in
106 # We hit this when running the script as part of Simple Systems Manager in
104 # EC2. The Python 3 installer doesn't seem to like per-user installs
107 # EC2. The Python 3 installer doesn't seem to like per-user installs
105 # when running as the SYSTEM user. So enable global installs if executed in
108 # when running as the SYSTEM user. So enable global installs if executed in
106 # this mode.
109 # this mode.
107 if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") {
110 if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") {
108 Write-Output "running with SYSTEM account; installing for all users"
111 Write-Output "running with SYSTEM account; installing for all users"
109 $allusers = "1"
112 $allusers = "1"
110 }
113 }
111 else {
114 else {
112 $allusers = "0"
115 $allusers = "0"
113 }
116 }
114
117
115 Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0"
118 Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0"
116 Invoke-Process ${dest}\python.exe $pip
119 Invoke-Process ${dest}\python.exe $pip
117 }
120 }
118
121
122 function Install-Rust($prefix) {
123 Write-Output "installing Rust"
124 $Env:RUSTUP_HOME = "${prefix}\rustup"
125 $Env:CARGO_HOME = "${prefix}\cargo"
126
127 Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc"
128 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc"
129 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.42.0"
130 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy"
131
132 # Install PyOxidizer for packaging.
133 Invoke-Process "${prefix}\cargo\bin\cargo.exe" "install --version 0.7.0 pyoxidizer"
134 }
135
119 function Install-Dependencies($prefix) {
136 function Install-Dependencies($prefix) {
120 if (!(Test-Path -Path $prefix\assets)) {
137 if (!(Test-Path -Path $prefix\assets)) {
121 New-Item -Path $prefix\assets -ItemType Directory
138 New-Item -Path $prefix\assets -ItemType Directory
122 }
139 }
123
140
124 $pip = "${prefix}\assets\get-pip.py"
141 $pip = "${prefix}\assets\get-pip.py"
125
142
126 Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256
143 Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256
127 Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256
144 Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256
128 Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256
145 Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256
129 Secure-Download $PYTHON35_x86_URL ${prefix}\assets\python35-x86.exe $PYTHON35_x86_SHA256
146 Secure-Download $PYTHON35_x86_URL ${prefix}\assets\python35-x86.exe $PYTHON35_x86_SHA256
130 Secure-Download $PYTHON35_x64_URL ${prefix}\assets\python35-x64.exe $PYTHON35_x64_SHA256
147 Secure-Download $PYTHON35_x64_URL ${prefix}\assets\python35-x64.exe $PYTHON35_x64_SHA256
131 Secure-Download $PYTHON36_x86_URL ${prefix}\assets\python36-x86.exe $PYTHON36_x86_SHA256
148 Secure-Download $PYTHON36_x86_URL ${prefix}\assets\python36-x86.exe $PYTHON36_x86_SHA256
132 Secure-Download $PYTHON36_x64_URL ${prefix}\assets\python36-x64.exe $PYTHON36_x64_SHA256
149 Secure-Download $PYTHON36_x64_URL ${prefix}\assets\python36-x64.exe $PYTHON36_x64_SHA256
133 Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256
150 Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256
134 Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256
151 Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256
135 Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256
152 Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256
136 Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
153 Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
137 Secure-Download $PIP_URL ${pip} $PIP_SHA256
154 Secure-Download $PIP_URL ${pip} $PIP_SHA256
138 Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256
155 Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256
139 Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
156 Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
140 Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
157 Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
141 Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256
158 Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256
142 Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256
159 Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256
160 Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256
143
161
144 Write-Output "installing Python 2.7 32-bit"
162 Write-Output "installing Python 2.7 32-bit"
145 Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q TARGETDIR=${prefix}\python27-x86 ALLUSERS="
163 Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q TARGETDIR=${prefix}\python27-x86 ALLUSERS="
146 Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py
164 Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py
147 Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz"
165 Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz"
148
166
149 Write-Output "installing Python 2.7 64-bit"
167 Write-Output "installing Python 2.7 64-bit"
150 Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS="
168 Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS="
151 Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py
169 Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py
152 Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz"
170 Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz"
153
171
154 Install-Python3 "Python 3.5 32-bit" ${prefix}\assets\python35-x86.exe ${prefix}\python35-x86 ${pip}
172 Install-Python3 "Python 3.5 32-bit" ${prefix}\assets\python35-x86.exe ${prefix}\python35-x86 ${pip}
155 Install-Python3 "Python 3.5 64-bit" ${prefix}\assets\python35-x64.exe ${prefix}\python35-x64 ${pip}
173 Install-Python3 "Python 3.5 64-bit" ${prefix}\assets\python35-x64.exe ${prefix}\python35-x64 ${pip}
156 Install-Python3 "Python 3.6 32-bit" ${prefix}\assets\python36-x86.exe ${prefix}\python36-x86 ${pip}
174 Install-Python3 "Python 3.6 32-bit" ${prefix}\assets\python36-x86.exe ${prefix}\python36-x86 ${pip}
157 Install-Python3 "Python 3.6 64-bit" ${prefix}\assets\python36-x64.exe ${prefix}\python36-x64 ${pip}
175 Install-Python3 "Python 3.6 64-bit" ${prefix}\assets\python36-x64.exe ${prefix}\python36-x64 ${pip}
158 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
176 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
159 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
177 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
160 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
178 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
161 Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
179 Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
162
180
163 Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
181 Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
164 Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
182 Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
165
183
184 Install-Rust ${prefix}
185
166 Write-Output "installing Visual C++ 9.0 for Python 2.7"
186 Write-Output "installing Visual C++ 9.0 for Python 2.7"
167 Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q"
187 Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q"
168
188
169 Write-Output "installing Inno Setup"
189 Write-Output "installing Inno Setup"
170 Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES"
190 Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES"
171
191
172 Write-Output "extracting MinGW base archive"
192 Write-Output "extracting MinGW base archive"
173 Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force
193 Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force
174
194
175 Write-Output "updating MinGW package catalogs"
195 Write-Output "updating MinGW package catalogs"
176 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update"
196 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update"
177
197
178 Write-Output "installing MinGW packages"
198 Write-Output "installing MinGW packages"
179 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip"
199 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip"
180
200
181 # Construct a virtualenv useful for bootstrapping. It conveniently contains a
201 # Construct a virtualenv useful for bootstrapping. It conveniently contains a
182 # Mercurial install.
202 # Mercurial install.
183 Write-Output "creating bootstrap virtualenv with Mercurial"
203 Write-Output "creating bootstrap virtualenv with Mercurial"
184 Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap"
204 Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap"
185 Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}"
205 Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}"
186 }
206 }
187
207
188 function Clone-Mercurial-Repo($prefix, $repo_url, $dest) {
208 function Clone-Mercurial-Repo($prefix, $repo_url, $dest) {
189 Write-Output "cloning $repo_url to $dest"
209 Write-Output "cloning $repo_url to $dest"
190 # TODO Figure out why CA verification isn't working in EC2 and remove
210 # TODO Figure out why CA verification isn't working in EC2 and remove
191 # --insecure.
211 # --insecure.
192 Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest"
212 Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest"
193
213
194 # Mark repo as non-publishing by default for convenience.
214 # Mark repo as non-publishing by default for convenience.
195 Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false"
215 Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false"
196 }
216 }
197
217
198 $prefix = "c:\hgdev"
218 $prefix = "c:\hgdev"
199 Install-Dependencies $prefix
219 Install-Dependencies $prefix
200 Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src
220 Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src
@@ -1,153 +1,183 b''
1 # cli.py - Command line interface for automation
1 # cli.py - Command line interface for automation
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import argparse
10 import argparse
11 import os
11 import os
12 import pathlib
12 import pathlib
13
13
14 from . import (
14 from . import (
15 inno,
15 inno,
16 wix,
16 wix,
17 )
17 )
18
18
19 HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
19 HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
20 SOURCE_DIR = HERE.parent.parent.parent
20 SOURCE_DIR = HERE.parent.parent.parent
21
21
22
22
23 def build_inno(python=None, iscc=None, version=None):
23 def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None):
24 if not os.path.isabs(python):
24 if not pyoxidizer_target and not python:
25 raise Exception("--python required unless building with PyOxidizer")
26
27 if python and not os.path.isabs(python):
25 raise Exception("--python arg must be an absolute path")
28 raise Exception("--python arg must be an absolute path")
26
29
27 if iscc:
30 if iscc:
28 iscc = pathlib.Path(iscc)
31 iscc = pathlib.Path(iscc)
29 else:
32 else:
30 iscc = (
33 iscc = (
31 pathlib.Path(os.environ["ProgramFiles(x86)"])
34 pathlib.Path(os.environ["ProgramFiles(x86)"])
32 / "Inno Setup 5"
35 / "Inno Setup 5"
33 / "ISCC.exe"
36 / "ISCC.exe"
34 )
37 )
35
38
36 build_dir = SOURCE_DIR / "build"
39 build_dir = SOURCE_DIR / "build"
37
40
38 inno.build(
41 if pyoxidizer_target:
39 SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
42 inno.build_with_pyoxidizer(
40 )
43 SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version
44 )
45 else:
46 inno.build_with_py2exe(
47 SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
48 )
41
49
42
50
43 def build_wix(
51 def build_wix(
44 name=None,
52 name=None,
53 pyoxidizer_target=None,
45 python=None,
54 python=None,
46 version=None,
55 version=None,
47 sign_sn=None,
56 sign_sn=None,
48 sign_cert=None,
57 sign_cert=None,
49 sign_password=None,
58 sign_password=None,
50 sign_timestamp_url=None,
59 sign_timestamp_url=None,
51 extra_packages_script=None,
60 extra_packages_script=None,
52 extra_wxs=None,
61 extra_wxs=None,
53 extra_features=None,
62 extra_features=None,
54 ):
63 ):
55 fn = wix.build_installer
64 if not pyoxidizer_target and not python:
65 raise Exception("--python required unless building with PyOxidizer")
66
67 if python and not os.path.isabs(python):
68 raise Exception("--python arg must be an absolute path")
69
56 kwargs = {
70 kwargs = {
57 "source_dir": SOURCE_DIR,
71 "source_dir": SOURCE_DIR,
58 "python_exe": pathlib.Path(python),
59 "version": version,
72 "version": version,
60 }
73 }
61
74
62 if not os.path.isabs(python):
75 if pyoxidizer_target:
63 raise Exception("--python arg must be an absolute path")
76 fn = wix.build_installer_pyoxidizer
77 kwargs["target_triple"] = pyoxidizer_target
78 else:
79 fn = wix.build_installer_py2exe
80 kwargs["python_exe"] = pathlib.Path(python)
64
81
65 if extra_packages_script:
82 if extra_packages_script:
83 if pyoxidizer_target:
84 raise Exception(
85 "pyoxidizer does not support --extra-packages-script"
86 )
66 kwargs["extra_packages_script"] = extra_packages_script
87 kwargs["extra_packages_script"] = extra_packages_script
67 if extra_wxs:
88 if extra_wxs:
68 kwargs["extra_wxs"] = dict(
89 kwargs["extra_wxs"] = dict(
69 thing.split("=") for thing in extra_wxs.split(",")
90 thing.split("=") for thing in extra_wxs.split(",")
70 )
91 )
71 if extra_features:
92 if extra_features:
72 kwargs["extra_features"] = extra_features.split(",")
93 kwargs["extra_features"] = extra_features.split(",")
73
94
74 if sign_sn or sign_cert:
95 if sign_sn or sign_cert:
75 fn = wix.build_signed_installer
96 kwargs["signing_info"] = {
76 kwargs["name"] = name
97 "name": name,
77 kwargs["subject_name"] = sign_sn
98 "subject_name": sign_sn,
78 kwargs["cert_path"] = sign_cert
99 "cert_path": sign_cert,
79 kwargs["cert_password"] = sign_password
100 "cert_password": sign_password,
80 kwargs["timestamp_url"] = sign_timestamp_url
101 "timestamp_url": sign_timestamp_url,
102 }
81
103
82 fn(**kwargs)
104 fn(**kwargs)
83
105
84
106
85 def get_parser():
107 def get_parser():
86 parser = argparse.ArgumentParser()
108 parser = argparse.ArgumentParser()
87
109
88 subparsers = parser.add_subparsers()
110 subparsers = parser.add_subparsers()
89
111
90 sp = subparsers.add_parser("inno", help="Build Inno Setup installer")
112 sp = subparsers.add_parser("inno", help="Build Inno Setup installer")
91 sp.add_argument("--python", required=True, help="path to python.exe to use")
113 sp.add_argument(
114 "--pyoxidizer-target",
115 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
116 help="Build with PyOxidizer targeting this host triple",
117 )
118 sp.add_argument("--python", help="path to python.exe to use")
92 sp.add_argument("--iscc", help="path to iscc.exe to use")
119 sp.add_argument("--iscc", help="path to iscc.exe to use")
93 sp.add_argument(
120 sp.add_argument(
94 "--version",
121 "--version",
95 help="Mercurial version string to use "
122 help="Mercurial version string to use "
96 "(detected from __version__.py if not defined",
123 "(detected from __version__.py if not defined",
97 )
124 )
98 sp.set_defaults(func=build_inno)
125 sp.set_defaults(func=build_inno)
99
126
100 sp = subparsers.add_parser(
127 sp = subparsers.add_parser(
101 "wix", help="Build Windows installer with WiX Toolset"
128 "wix", help="Build Windows installer with WiX Toolset"
102 )
129 )
103 sp.add_argument("--name", help="Application name", default="Mercurial")
130 sp.add_argument("--name", help="Application name", default="Mercurial")
104 sp.add_argument(
131 sp.add_argument(
105 "--python", help="Path to Python executable to use", required=True
132 "--pyoxidizer-target",
133 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
134 help="Build with PyOxidizer targeting this host triple",
106 )
135 )
136 sp.add_argument("--python", help="Path to Python executable to use")
107 sp.add_argument(
137 sp.add_argument(
108 "--sign-sn",
138 "--sign-sn",
109 help="Subject name (or fragment thereof) of certificate "
139 help="Subject name (or fragment thereof) of certificate "
110 "to use for signing",
140 "to use for signing",
111 )
141 )
112 sp.add_argument(
142 sp.add_argument(
113 "--sign-cert", help="Path to certificate to use for signing"
143 "--sign-cert", help="Path to certificate to use for signing"
114 )
144 )
115 sp.add_argument("--sign-password", help="Password for signing certificate")
145 sp.add_argument("--sign-password", help="Password for signing certificate")
116 sp.add_argument(
146 sp.add_argument(
117 "--sign-timestamp-url",
147 "--sign-timestamp-url",
118 help="URL of timestamp server to use for signing",
148 help="URL of timestamp server to use for signing",
119 )
149 )
120 sp.add_argument("--version", help="Version string to use")
150 sp.add_argument("--version", help="Version string to use")
121 sp.add_argument(
151 sp.add_argument(
122 "--extra-packages-script",
152 "--extra-packages-script",
123 help=(
153 help=(
124 "Script to execute to include extra packages in " "py2exe binary."
154 "Script to execute to include extra packages in " "py2exe binary."
125 ),
155 ),
126 )
156 )
127 sp.add_argument(
157 sp.add_argument(
128 "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
158 "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
129 )
159 )
130 sp.add_argument(
160 sp.add_argument(
131 "--extra-features",
161 "--extra-features",
132 help=(
162 help=(
133 "CSV of extra feature names to include "
163 "CSV of extra feature names to include "
134 "in the installer from the extra wxs files"
164 "in the installer from the extra wxs files"
135 ),
165 ),
136 )
166 )
137 sp.set_defaults(func=build_wix)
167 sp.set_defaults(func=build_wix)
138
168
139 return parser
169 return parser
140
170
141
171
142 def main():
172 def main():
143 parser = get_parser()
173 parser = get_parser()
144 args = parser.parse_args()
174 args = parser.parse_args()
145
175
146 if not hasattr(args, "func"):
176 if not hasattr(args, "func"):
147 parser.print_help()
177 parser.print_help()
148 return
178 return
149
179
150 kwargs = dict(vars(args))
180 kwargs = dict(vars(args))
151 del kwargs["func"]
181 del kwargs["func"]
152
182
153 args.func(**kwargs)
183 args.func(**kwargs)
@@ -1,173 +1,232 b''
1 # inno.py - Inno Setup functionality.
1 # inno.py - Inno Setup functionality.
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import os
10 import os
11 import pathlib
11 import pathlib
12 import shutil
12 import shutil
13 import subprocess
13 import subprocess
14
14
15 import jinja2
15 import jinja2
16
16
17 from .py2exe import (
17 from .py2exe import (
18 build_py2exe,
18 build_py2exe,
19 stage_install,
19 stage_install,
20 )
20 )
21 from .pyoxidizer import run_pyoxidizer
21 from .util import (
22 from .util import (
22 find_vc_runtime_files,
23 find_legacy_vc_runtime_files,
23 normalize_windows_version,
24 normalize_windows_version,
24 process_install_rules,
25 process_install_rules,
25 read_version_py,
26 read_version_py,
26 )
27 )
27
28
28 EXTRA_PACKAGES = {
29 EXTRA_PACKAGES = {
29 'dulwich',
30 'dulwich',
30 'keyring',
31 'keyring',
31 'pygments',
32 'pygments',
32 'win32ctypes',
33 'win32ctypes',
33 }
34 }
34
35
35 EXTRA_INSTALL_RULES = [
36 EXTRA_INSTALL_RULES = [
36 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
37 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
37 ]
38 ]
38
39
39 PACKAGE_FILES_METADATA = {
40 PACKAGE_FILES_METADATA = {
40 'ReadMe.html': 'Flags: isreadme',
41 'ReadMe.html': 'Flags: isreadme',
41 }
42 }
42
43
43
44
44 def build(
45 def build_with_py2exe(
45 source_dir: pathlib.Path,
46 source_dir: pathlib.Path,
46 build_dir: pathlib.Path,
47 build_dir: pathlib.Path,
47 python_exe: pathlib.Path,
48 python_exe: pathlib.Path,
48 iscc_exe: pathlib.Path,
49 iscc_exe: pathlib.Path,
49 version=None,
50 version=None,
50 ):
51 ):
51 """Build the Inno installer.
52 """Build the Inno installer using py2exe.
52
53
53 Build files will be placed in ``build_dir``.
54 Build files will be placed in ``build_dir``.
54
55
55 py2exe's setup.py doesn't use setuptools. It doesn't have modern logic
56 py2exe's setup.py doesn't use setuptools. It doesn't have modern logic
56 for finding the Python 2.7 toolchain. So, we require the environment
57 for finding the Python 2.7 toolchain. So, we require the environment
57 to already be configured with an active toolchain.
58 to already be configured with an active toolchain.
58 """
59 """
59 if not iscc_exe.exists():
60 if not iscc_exe.exists():
60 raise Exception('%s does not exist' % iscc_exe)
61 raise Exception('%s does not exist' % iscc_exe)
61
62
62 vc_x64 = r'\x64' in os.environ.get('LIB', '')
63 vc_x64 = r'\x64' in os.environ.get('LIB', '')
63 arch = 'x64' if vc_x64 else 'x86'
64 arch = 'x64' if vc_x64 else 'x86'
64 inno_source_dir = source_dir / 'contrib' / 'packaging' / 'inno'
65 inno_build_dir = build_dir / ('inno-py2exe-%s' % arch)
65 inno_build_dir = build_dir / ('inno-%s' % arch)
66 staging_dir = inno_build_dir / 'stage'
66 staging_dir = inno_build_dir / 'stage'
67
67
68 requirements_txt = (
68 requirements_txt = (
69 source_dir / 'contrib' / 'packaging' / 'requirements_win32.txt'
69 source_dir / 'contrib' / 'packaging' / 'requirements_win32.txt'
70 )
70 )
71
71
72 inno_build_dir.mkdir(parents=True, exist_ok=True)
72 inno_build_dir.mkdir(parents=True, exist_ok=True)
73
73
74 build_py2exe(
74 build_py2exe(
75 source_dir,
75 source_dir,
76 build_dir,
76 build_dir,
77 python_exe,
77 python_exe,
78 'inno',
78 'inno',
79 requirements_txt,
79 requirements_txt,
80 extra_packages=EXTRA_PACKAGES,
80 extra_packages=EXTRA_PACKAGES,
81 )
81 )
82
82
83 # Purge the staging directory for every build so packaging is
83 # Purge the staging directory for every build so packaging is
84 # pristine.
84 # pristine.
85 if staging_dir.exists():
85 if staging_dir.exists():
86 print('purging %s' % staging_dir)
86 print('purging %s' % staging_dir)
87 shutil.rmtree(staging_dir)
87 shutil.rmtree(staging_dir)
88
88
89 # Now assemble all the packaged files into the staging directory.
89 # Now assemble all the packaged files into the staging directory.
90 stage_install(source_dir, staging_dir)
90 stage_install(source_dir, staging_dir)
91
91
92 # We also install some extra files.
92 # We also install some extra files.
93 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
93 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
94
94
95 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
95 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
96 for f in find_vc_runtime_files(vc_x64):
96 for f in find_legacy_vc_runtime_files(vc_x64):
97 if f.name.endswith('.manifest'):
97 if f.name.endswith('.manifest'):
98 basename = 'Microsoft.VC90.CRT.manifest'
98 basename = 'Microsoft.VC90.CRT.manifest'
99 else:
99 else:
100 basename = f.name
100 basename = f.name
101
101
102 dest_path = staging_dir / basename
102 dest_path = staging_dir / basename
103
103
104 print('copying %s to %s' % (f, dest_path))
104 print('copying %s to %s' % (f, dest_path))
105 shutil.copyfile(f, dest_path)
105 shutil.copyfile(f, dest_path)
106
106
107 build_installer(
108 source_dir,
109 inno_build_dir,
110 staging_dir,
111 iscc_exe,
112 version,
113 arch="x64" if vc_x64 else None,
114 suffix="-python2",
115 )
116
117
118 def build_with_pyoxidizer(
119 source_dir: pathlib.Path,
120 build_dir: pathlib.Path,
121 target_triple: str,
122 iscc_exe: pathlib.Path,
123 version=None,
124 ):
125 """Build the Inno installer using PyOxidizer."""
126 if not iscc_exe.exists():
127 raise Exception("%s does not exist" % iscc_exe)
128
129 inno_build_dir = build_dir / ("inno-pyoxidizer-%s" % target_triple)
130 staging_dir = inno_build_dir / "stage"
131
132 inno_build_dir.mkdir(parents=True, exist_ok=True)
133 run_pyoxidizer(source_dir, inno_build_dir, staging_dir, target_triple)
134
135 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
136
137 build_installer(
138 source_dir,
139 inno_build_dir,
140 staging_dir,
141 iscc_exe,
142 version,
143 arch="x64" if "x86_64" in target_triple else None,
144 )
145
146
147 def build_installer(
148 source_dir: pathlib.Path,
149 inno_build_dir: pathlib.Path,
150 staging_dir: pathlib.Path,
151 iscc_exe: pathlib.Path,
152 version,
153 arch=None,
154 suffix="",
155 ):
156 """Build an Inno installer from staged Mercurial files.
157
158 This function is agnostic about how to build Mercurial. It just
159 cares that Mercurial files are in ``staging_dir``.
160 """
161 inno_source_dir = source_dir / "contrib" / "packaging" / "inno"
162
107 # The final package layout is simply a mirror of the staging directory.
163 # The final package layout is simply a mirror of the staging directory.
108 package_files = []
164 package_files = []
109 for root, dirs, files in os.walk(staging_dir):
165 for root, dirs, files in os.walk(staging_dir):
110 dirs.sort()
166 dirs.sort()
111
167
112 root = pathlib.Path(root)
168 root = pathlib.Path(root)
113
169
114 for f in sorted(files):
170 for f in sorted(files):
115 full = root / f
171 full = root / f
116 rel = full.relative_to(staging_dir)
172 rel = full.relative_to(staging_dir)
117 if str(rel.parent) == '.':
173 if str(rel.parent) == '.':
118 dest_dir = '{app}'
174 dest_dir = '{app}'
119 else:
175 else:
120 dest_dir = '{app}\\%s' % rel.parent
176 dest_dir = '{app}\\%s' % rel.parent
121
177
122 package_files.append(
178 package_files.append(
123 {
179 {
124 'source': rel,
180 'source': rel,
125 'dest_dir': dest_dir,
181 'dest_dir': dest_dir,
126 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None),
182 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None),
127 }
183 }
128 )
184 )
129
185
130 print('creating installer')
186 print('creating installer')
131
187
132 # Install Inno files by rendering a template.
188 # Install Inno files by rendering a template.
133 jinja_env = jinja2.Environment(
189 jinja_env = jinja2.Environment(
134 loader=jinja2.FileSystemLoader(str(inno_source_dir)),
190 loader=jinja2.FileSystemLoader(str(inno_source_dir)),
135 # Need to change these to prevent conflict with Inno Setup.
191 # Need to change these to prevent conflict with Inno Setup.
136 comment_start_string='{##',
192 comment_start_string='{##',
137 comment_end_string='##}',
193 comment_end_string='##}',
138 )
194 )
139
195
140 try:
196 try:
141 template = jinja_env.get_template('mercurial.iss')
197 template = jinja_env.get_template('mercurial.iss')
142 except jinja2.TemplateSyntaxError as e:
198 except jinja2.TemplateSyntaxError as e:
143 raise Exception(
199 raise Exception(
144 'template syntax error at %s:%d: %s'
200 'template syntax error at %s:%d: %s'
145 % (e.name, e.lineno, e.message,)
201 % (e.name, e.lineno, e.message,)
146 )
202 )
147
203
148 content = template.render(package_files=package_files)
204 content = template.render(package_files=package_files)
149
205
150 with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh:
206 with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh:
151 fh.write(content)
207 fh.write(content)
152
208
153 # Copy additional files used by Inno.
209 # Copy additional files used by Inno.
154 for p in ('mercurial.ico', 'postinstall.txt'):
210 for p in ('mercurial.ico', 'postinstall.txt'):
155 shutil.copyfile(
211 shutil.copyfile(
156 source_dir / 'contrib' / 'win32' / p, inno_build_dir / p
212 source_dir / 'contrib' / 'win32' / p, inno_build_dir / p
157 )
213 )
158
214
159 args = [str(iscc_exe)]
215 args = [str(iscc_exe)]
160
216
161 if vc_x64:
217 if arch:
162 args.append('/dARCH=x64')
218 args.append('/dARCH=%s' % arch)
219 args.append('/dSUFFIX=-%s%s' % (arch, suffix))
220 else:
221 args.append('/dSUFFIX=-x86%s' % suffix)
163
222
164 if not version:
223 if not version:
165 version = read_version_py(source_dir)
224 version = read_version_py(source_dir)
166
225
167 args.append('/dVERSION=%s' % version)
226 args.append('/dVERSION=%s' % version)
168 args.append('/dQUAD_VERSION=%s' % normalize_windows_version(version))
227 args.append('/dQUAD_VERSION=%s' % normalize_windows_version(version))
169
228
170 args.append('/Odist')
229 args.append('/Odist')
171 args.append(str(inno_build_dir / 'mercurial.iss'))
230 args.append(str(inno_build_dir / 'mercurial.iss'))
172
231
173 subprocess.run(args, cwd=str(source_dir), check=True)
232 subprocess.run(args, cwd=str(source_dir), check=True)
@@ -1,286 +1,338 b''
1 # util.py - Common packaging utility code.
1 # util.py - Common packaging utility code.
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import distutils.version
10 import distutils.version
11 import getpass
11 import getpass
12 import glob
12 import glob
13 import os
13 import os
14 import pathlib
14 import pathlib
15 import re
15 import re
16 import shutil
16 import shutil
17 import subprocess
17 import subprocess
18 import tarfile
18 import tarfile
19 import zipfile
19 import zipfile
20
20
21
21
22 def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path):
22 def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path):
23 with tarfile.open(source, 'r') as tf:
23 with tarfile.open(source, 'r') as tf:
24 tf.extractall(dest)
24 tf.extractall(dest)
25
25
26
26
27 def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path):
27 def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path):
28 with zipfile.ZipFile(source, 'r') as zf:
28 with zipfile.ZipFile(source, 'r') as zf:
29 zf.extractall(dest)
29 zf.extractall(dest)
30
30
31
31
32 def find_vc_runtime_files(x64=False):
32 def find_vc_runtime_dll(x64=False):
33 """Finds Visual C++ Runtime DLL to include in distribution."""
34 # We invoke vswhere to find the latest Visual Studio install.
35 vswhere = (
36 pathlib.Path(os.environ["ProgramFiles(x86)"])
37 / "Microsoft Visual Studio"
38 / "Installer"
39 / "vswhere.exe"
40 )
41
42 if not vswhere.exists():
43 raise Exception(
44 "could not find vswhere.exe: %s does not exist" % vswhere
45 )
46
47 args = [
48 str(vswhere),
49 # -products * is necessary to return results from Build Tools
50 # (as opposed to full IDE installs).
51 "-products",
52 "*",
53 "-requires",
54 "Microsoft.VisualCpp.Redist.14.Latest",
55 "-latest",
56 "-property",
57 "installationPath",
58 ]
59
60 vs_install_path = pathlib.Path(
61 os.fsdecode(subprocess.check_output(args).strip())
62 )
63
64 # This just gets us a path like
65 # C:\Program Files (x86)\Microsoft Visual Studio\2019\Community
66 # Actually vcruntime140.dll is under a path like:
67 # VC\Redist\MSVC\<version>\<arch>\Microsoft.VC14<X>.CRT\vcruntime140.dll.
68
69 arch = "x64" if x64 else "x86"
70
71 search_glob = (
72 r"%s\VC\Redist\MSVC\*\%s\Microsoft.VC14*.CRT\vcruntime140.dll"
73 % (vs_install_path, arch)
74 )
75
76 candidates = glob.glob(search_glob, recursive=True)
77
78 for candidate in reversed(candidates):
79 return pathlib.Path(candidate)
80
81 raise Exception("could not find vcruntime140.dll")
82
83
84 def find_legacy_vc_runtime_files(x64=False):
33 """Finds Visual C++ Runtime DLLs to include in distribution."""
85 """Finds Visual C++ Runtime DLLs to include in distribution."""
34 winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS'
86 winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS'
35
87
36 prefix = 'amd64' if x64 else 'x86'
88 prefix = 'amd64' if x64 else 'x86'
37
89
38 candidates = sorted(
90 candidates = sorted(
39 p
91 p
40 for p in os.listdir(winsxs)
92 for p in os.listdir(winsxs)
41 if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
93 if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
42 )
94 )
43
95
44 for p in candidates:
96 for p in candidates:
45 print('found candidate VC runtime: %s' % p)
97 print('found candidate VC runtime: %s' % p)
46
98
47 # Take the newest version.
99 # Take the newest version.
48 version = candidates[-1]
100 version = candidates[-1]
49
101
50 d = winsxs / version
102 d = winsxs / version
51
103
52 return [
104 return [
53 d / 'msvcm90.dll',
105 d / 'msvcm90.dll',
54 d / 'msvcp90.dll',
106 d / 'msvcp90.dll',
55 d / 'msvcr90.dll',
107 d / 'msvcr90.dll',
56 winsxs / 'Manifests' / ('%s.manifest' % version),
108 winsxs / 'Manifests' / ('%s.manifest' % version),
57 ]
109 ]
58
110
59
111
60 def windows_10_sdk_info():
112 def windows_10_sdk_info():
61 """Resolves information about the Windows 10 SDK."""
113 """Resolves information about the Windows 10 SDK."""
62
114
63 base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10'
115 base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10'
64
116
65 if not base.is_dir():
117 if not base.is_dir():
66 raise Exception('unable to find Windows 10 SDK at %s' % base)
118 raise Exception('unable to find Windows 10 SDK at %s' % base)
67
119
68 # Find the latest version.
120 # Find the latest version.
69 bin_base = base / 'bin'
121 bin_base = base / 'bin'
70
122
71 versions = [v for v in os.listdir(bin_base) if v.startswith('10.')]
123 versions = [v for v in os.listdir(bin_base) if v.startswith('10.')]
72 version = sorted(versions, reverse=True)[0]
124 version = sorted(versions, reverse=True)[0]
73
125
74 bin_version = bin_base / version
126 bin_version = bin_base / version
75
127
76 return {
128 return {
77 'root': base,
129 'root': base,
78 'version': version,
130 'version': version,
79 'bin_root': bin_version,
131 'bin_root': bin_version,
80 'bin_x86': bin_version / 'x86',
132 'bin_x86': bin_version / 'x86',
81 'bin_x64': bin_version / 'x64',
133 'bin_x64': bin_version / 'x64',
82 }
134 }
83
135
84
136
85 def normalize_windows_version(version):
137 def normalize_windows_version(version):
86 """Normalize Mercurial version string so WiX/Inno accepts it.
138 """Normalize Mercurial version string so WiX/Inno accepts it.
87
139
88 Version strings have to be numeric ``A.B.C[.D]`` to conform with MSI's
140 Version strings have to be numeric ``A.B.C[.D]`` to conform with MSI's
89 requirements.
141 requirements.
90
142
91 We normalize RC version or the commit count to a 4th version component.
143 We normalize RC version or the commit count to a 4th version component.
92 We store this in the 4th component because ``A.B.C`` releases do occur
144 We store this in the 4th component because ``A.B.C`` releases do occur
93 and we want an e.g. ``5.3rc0`` version to be semantically less than a
145 and we want an e.g. ``5.3rc0`` version to be semantically less than a
94 ``5.3.1rc2`` version. This requires always reserving the 3rd version
146 ``5.3.1rc2`` version. This requires always reserving the 3rd version
95 component for the point release and the ``X.YrcN`` release is always
147 component for the point release and the ``X.YrcN`` release is always
96 point release 0.
148 point release 0.
97
149
98 In the case of an RC and presence of ``+`` suffix data, we can't use both
150 In the case of an RC and presence of ``+`` suffix data, we can't use both
99 because the version format is limited to 4 components. We choose to use
151 because the version format is limited to 4 components. We choose to use
100 RC and throw away the commit count in the suffix. This means we could
152 RC and throw away the commit count in the suffix. This means we could
101 produce multiple installers with the same normalized version string.
153 produce multiple installers with the same normalized version string.
102
154
103 >>> normalize_windows_version("5.3")
155 >>> normalize_windows_version("5.3")
104 '5.3.0'
156 '5.3.0'
105
157
106 >>> normalize_windows_version("5.3rc0")
158 >>> normalize_windows_version("5.3rc0")
107 '5.3.0.0'
159 '5.3.0.0'
108
160
109 >>> normalize_windows_version("5.3rc1")
161 >>> normalize_windows_version("5.3rc1")
110 '5.3.0.1'
162 '5.3.0.1'
111
163
112 >>> normalize_windows_version("5.3rc1+2-abcdef")
164 >>> normalize_windows_version("5.3rc1+2-abcdef")
113 '5.3.0.1'
165 '5.3.0.1'
114
166
115 >>> normalize_windows_version("5.3+2-abcdef")
167 >>> normalize_windows_version("5.3+2-abcdef")
116 '5.3.0.2'
168 '5.3.0.2'
117 """
169 """
118 if '+' in version:
170 if '+' in version:
119 version, extra = version.split('+', 1)
171 version, extra = version.split('+', 1)
120 else:
172 else:
121 extra = None
173 extra = None
122
174
123 # 4.9rc0
175 # 4.9rc0
124 if version[:-1].endswith('rc'):
176 if version[:-1].endswith('rc'):
125 rc = int(version[-1:])
177 rc = int(version[-1:])
126 version = version[:-3]
178 version = version[:-3]
127 else:
179 else:
128 rc = None
180 rc = None
129
181
130 # Ensure we have at least X.Y version components.
182 # Ensure we have at least X.Y version components.
131 versions = [int(v) for v in version.split('.')]
183 versions = [int(v) for v in version.split('.')]
132 while len(versions) < 3:
184 while len(versions) < 3:
133 versions.append(0)
185 versions.append(0)
134
186
135 if len(versions) < 4:
187 if len(versions) < 4:
136 if rc is not None:
188 if rc is not None:
137 versions.append(rc)
189 versions.append(rc)
138 elif extra:
190 elif extra:
139 # <commit count>-<hash>+<date>
191 # <commit count>-<hash>+<date>
140 versions.append(int(extra.split('-')[0]))
192 versions.append(int(extra.split('-')[0]))
141
193
142 return '.'.join('%d' % x for x in versions[0:4])
194 return '.'.join('%d' % x for x in versions[0:4])
143
195
144
196
145 def find_signtool():
197 def find_signtool():
146 """Find signtool.exe from the Windows SDK."""
198 """Find signtool.exe from the Windows SDK."""
147 sdk = windows_10_sdk_info()
199 sdk = windows_10_sdk_info()
148
200
149 for key in ('bin_x64', 'bin_x86'):
201 for key in ('bin_x64', 'bin_x86'):
150 p = sdk[key] / 'signtool.exe'
202 p = sdk[key] / 'signtool.exe'
151
203
152 if p.exists():
204 if p.exists():
153 return p
205 return p
154
206
155 raise Exception('could not find signtool.exe in Windows 10 SDK')
207 raise Exception('could not find signtool.exe in Windows 10 SDK')
156
208
157
209
158 def sign_with_signtool(
210 def sign_with_signtool(
159 file_path,
211 file_path,
160 description,
212 description,
161 subject_name=None,
213 subject_name=None,
162 cert_path=None,
214 cert_path=None,
163 cert_password=None,
215 cert_password=None,
164 timestamp_url=None,
216 timestamp_url=None,
165 ):
217 ):
166 """Digitally sign a file with signtool.exe.
218 """Digitally sign a file with signtool.exe.
167
219
168 ``file_path`` is file to sign.
220 ``file_path`` is file to sign.
169 ``description`` is text that goes in the signature.
221 ``description`` is text that goes in the signature.
170
222
171 The signing certificate can be specified by ``cert_path`` or
223 The signing certificate can be specified by ``cert_path`` or
172 ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments
224 ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments
173 to signtool.exe, respectively.
225 to signtool.exe, respectively.
174
226
175 The certificate password can be specified via ``cert_password``. If
227 The certificate password can be specified via ``cert_password``. If
176 not provided, you will be prompted for the password.
228 not provided, you will be prompted for the password.
177
229
178 ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr``
230 ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr``
179 argument to signtool.exe).
231 argument to signtool.exe).
180 """
232 """
181 if cert_path and subject_name:
233 if cert_path and subject_name:
182 raise ValueError('cannot specify both cert_path and subject_name')
234 raise ValueError('cannot specify both cert_path and subject_name')
183
235
184 while cert_path and not cert_password:
236 while cert_path and not cert_password:
185 cert_password = getpass.getpass('password for %s: ' % cert_path)
237 cert_password = getpass.getpass('password for %s: ' % cert_path)
186
238
187 args = [
239 args = [
188 str(find_signtool()),
240 str(find_signtool()),
189 'sign',
241 'sign',
190 '/v',
242 '/v',
191 '/fd',
243 '/fd',
192 'sha256',
244 'sha256',
193 '/d',
245 '/d',
194 description,
246 description,
195 ]
247 ]
196
248
197 if cert_path:
249 if cert_path:
198 args.extend(['/f', str(cert_path), '/p', cert_password])
250 args.extend(['/f', str(cert_path), '/p', cert_password])
199 elif subject_name:
251 elif subject_name:
200 args.extend(['/n', subject_name])
252 args.extend(['/n', subject_name])
201
253
202 if timestamp_url:
254 if timestamp_url:
203 args.extend(['/tr', timestamp_url, '/td', 'sha256'])
255 args.extend(['/tr', timestamp_url, '/td', 'sha256'])
204
256
205 args.append(str(file_path))
257 args.append(str(file_path))
206
258
207 print('signing %s' % file_path)
259 print('signing %s' % file_path)
208 subprocess.run(args, check=True)
260 subprocess.run(args, check=True)
209
261
210
262
211 PRINT_PYTHON_INFO = '''
263 PRINT_PYTHON_INFO = '''
212 import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version()))
264 import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version()))
213 '''.strip()
265 '''.strip()
214
266
215
267
216 def python_exe_info(python_exe: pathlib.Path):
268 def python_exe_info(python_exe: pathlib.Path):
217 """Obtain information about a Python executable."""
269 """Obtain information about a Python executable."""
218
270
219 res = subprocess.check_output([str(python_exe), '-c', PRINT_PYTHON_INFO])
271 res = subprocess.check_output([str(python_exe), '-c', PRINT_PYTHON_INFO])
220
272
221 arch, version = res.decode('utf-8').split(':')
273 arch, version = res.decode('utf-8').split(':')
222
274
223 version = distutils.version.LooseVersion(version)
275 version = distutils.version.LooseVersion(version)
224
276
225 return {
277 return {
226 'arch': arch,
278 'arch': arch,
227 'version': version,
279 'version': version,
228 'py3': version >= distutils.version.LooseVersion('3'),
280 'py3': version >= distutils.version.LooseVersion('3'),
229 }
281 }
230
282
231
283
232 def process_install_rules(
284 def process_install_rules(
233 rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
285 rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
234 ):
286 ):
235 for source, dest in rules:
287 for source, dest in rules:
236 if '*' in source:
288 if '*' in source:
237 if not dest.endswith('/'):
289 if not dest.endswith('/'):
238 raise ValueError('destination must end in / when globbing')
290 raise ValueError('destination must end in / when globbing')
239
291
240 # We strip off the source path component before the first glob
292 # We strip off the source path component before the first glob
241 # character to construct the relative install path.
293 # character to construct the relative install path.
242 prefix_end_index = source[: source.index('*')].rindex('/')
294 prefix_end_index = source[: source.index('*')].rindex('/')
243 relative_prefix = source_dir / source[0:prefix_end_index]
295 relative_prefix = source_dir / source[0:prefix_end_index]
244
296
245 for res in glob.glob(str(source_dir / source), recursive=True):
297 for res in glob.glob(str(source_dir / source), recursive=True):
246 source_path = pathlib.Path(res)
298 source_path = pathlib.Path(res)
247
299
248 if source_path.is_dir():
300 if source_path.is_dir():
249 continue
301 continue
250
302
251 rel_path = source_path.relative_to(relative_prefix)
303 rel_path = source_path.relative_to(relative_prefix)
252
304
253 dest_path = dest_dir / dest[:-1] / rel_path
305 dest_path = dest_dir / dest[:-1] / rel_path
254
306
255 dest_path.parent.mkdir(parents=True, exist_ok=True)
307 dest_path.parent.mkdir(parents=True, exist_ok=True)
256 print('copying %s to %s' % (source_path, dest_path))
308 print('copying %s to %s' % (source_path, dest_path))
257 shutil.copy(source_path, dest_path)
309 shutil.copy(source_path, dest_path)
258
310
259 # Simple file case.
311 # Simple file case.
260 else:
312 else:
261 source_path = pathlib.Path(source)
313 source_path = pathlib.Path(source)
262
314
263 if dest.endswith('/'):
315 if dest.endswith('/'):
264 dest_path = pathlib.Path(dest) / source_path.name
316 dest_path = pathlib.Path(dest) / source_path.name
265 else:
317 else:
266 dest_path = pathlib.Path(dest)
318 dest_path = pathlib.Path(dest)
267
319
268 full_source_path = source_dir / source_path
320 full_source_path = source_dir / source_path
269 full_dest_path = dest_dir / dest_path
321 full_dest_path = dest_dir / dest_path
270
322
271 full_dest_path.parent.mkdir(parents=True, exist_ok=True)
323 full_dest_path.parent.mkdir(parents=True, exist_ok=True)
272 shutil.copy(full_source_path, full_dest_path)
324 shutil.copy(full_source_path, full_dest_path)
273 print('copying %s to %s' % (full_source_path, full_dest_path))
325 print('copying %s to %s' % (full_source_path, full_dest_path))
274
326
275
327
276 def read_version_py(source_dir):
328 def read_version_py(source_dir):
277 """Read the mercurial/__version__.py file to resolve the version string."""
329 """Read the mercurial/__version__.py file to resolve the version string."""
278 p = source_dir / 'mercurial' / '__version__.py'
330 p = source_dir / 'mercurial' / '__version__.py'
279
331
280 with p.open('r', encoding='utf-8') as fh:
332 with p.open('r', encoding='utf-8') as fh:
281 m = re.search('version = b"([^"]+)"', fh.read(), re.MULTILINE)
333 m = re.search('version = b"([^"]+)"', fh.read(), re.MULTILINE)
282
334
283 if not m:
335 if not m:
284 raise Exception('could not parse %s' % p)
336 raise Exception('could not parse %s' % p)
285
337
286 return m.group(1)
338 return m.group(1)
@@ -1,499 +1,535 b''
1 # wix.py - WiX installer functionality
1 # wix.py - WiX installer functionality
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import collections
10 import collections
11 import os
11 import os
12 import pathlib
12 import pathlib
13 import re
13 import re
14 import shutil
14 import shutil
15 import subprocess
15 import subprocess
16 import typing
16 import typing
17 import uuid
17 import uuid
18 import xml.dom.minidom
18 import xml.dom.minidom
19
19
20 from .downloads import download_entry
20 from .downloads import download_entry
21 from .py2exe import (
21 from .py2exe import (
22 build_py2exe,
22 build_py2exe,
23 stage_install,
23 stage_install,
24 )
24 )
25 from .pyoxidizer import run_pyoxidizer
25 from .util import (
26 from .util import (
26 extract_zip_to_directory,
27 extract_zip_to_directory,
27 normalize_windows_version,
28 normalize_windows_version,
28 process_install_rules,
29 process_install_rules,
29 sign_with_signtool,
30 sign_with_signtool,
30 )
31 )
31
32
32
33
33 EXTRA_PACKAGES = {
34 EXTRA_PACKAGES = {
34 'dulwich',
35 'dulwich',
35 'distutils',
36 'distutils',
36 'keyring',
37 'keyring',
37 'pygments',
38 'pygments',
38 'win32ctypes',
39 'win32ctypes',
39 }
40 }
40
41
41
42
42 EXTRA_INSTALL_RULES = [
43 EXTRA_INSTALL_RULES = [
43 ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'),
44 ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'),
44 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
45 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
45 ]
46 ]
46
47
47 STAGING_REMOVE_FILES = [
48 STAGING_REMOVE_FILES = [
48 # We use the RTF variant.
49 # We use the RTF variant.
49 'copying.txt',
50 'copying.txt',
50 ]
51 ]
51
52
52 SHORTCUTS = {
53 SHORTCUTS = {
53 # hg.1.html'
54 # hg.1.html'
54 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': {
55 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': {
55 'Name': 'Mercurial Command Reference',
56 'Name': 'Mercurial Command Reference',
56 },
57 },
57 # hgignore.5.html
58 # hgignore.5.html
58 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': {
59 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': {
59 'Name': 'Mercurial Ignore Files',
60 'Name': 'Mercurial Ignore Files',
60 },
61 },
61 # hgrc.5.html
62 # hgrc.5.html
62 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': {
63 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': {
63 'Name': 'Mercurial Configuration Files',
64 'Name': 'Mercurial Configuration Files',
64 },
65 },
65 }
66 }
66
67
67
68
68 def find_version(source_dir: pathlib.Path):
69 def find_version(source_dir: pathlib.Path):
69 version_py = source_dir / 'mercurial' / '__version__.py'
70 version_py = source_dir / 'mercurial' / '__version__.py'
70
71
71 with version_py.open('r', encoding='utf-8') as fh:
72 with version_py.open('r', encoding='utf-8') as fh:
72 source = fh.read().strip()
73 source = fh.read().strip()
73
74
74 m = re.search('version = b"(.*)"', source)
75 m = re.search('version = b"(.*)"', source)
75 return m.group(1)
76 return m.group(1)
76
77
77
78
78 def ensure_vc90_merge_modules(build_dir):
79 def ensure_vc90_merge_modules(build_dir):
79 x86 = (
80 x86 = (
80 download_entry(
81 download_entry(
81 'vc9-crt-x86-msm',
82 'vc9-crt-x86-msm',
82 build_dir,
83 build_dir,
83 local_name='microsoft.vcxx.crt.x86_msm.msm',
84 local_name='microsoft.vcxx.crt.x86_msm.msm',
84 )[0],
85 )[0],
85 download_entry(
86 download_entry(
86 'vc9-crt-x86-msm-policy',
87 'vc9-crt-x86-msm-policy',
87 build_dir,
88 build_dir,
88 local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
89 local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
89 )[0],
90 )[0],
90 )
91 )
91
92
92 x64 = (
93 x64 = (
93 download_entry(
94 download_entry(
94 'vc9-crt-x64-msm',
95 'vc9-crt-x64-msm',
95 build_dir,
96 build_dir,
96 local_name='microsoft.vcxx.crt.x64_msm.msm',
97 local_name='microsoft.vcxx.crt.x64_msm.msm',
97 )[0],
98 )[0],
98 download_entry(
99 download_entry(
99 'vc9-crt-x64-msm-policy',
100 'vc9-crt-x64-msm-policy',
100 build_dir,
101 build_dir,
101 local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
102 local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
102 )[0],
103 )[0],
103 )
104 )
104 return {
105 return {
105 'x86': x86,
106 'x86': x86,
106 'x64': x64,
107 'x64': x64,
107 }
108 }
108
109
109
110
110 def run_candle(wix, cwd, wxs, source_dir, defines=None):
111 def run_candle(wix, cwd, wxs, source_dir, defines=None):
111 args = [
112 args = [
112 str(wix / 'candle.exe'),
113 str(wix / 'candle.exe'),
113 '-nologo',
114 '-nologo',
114 str(wxs),
115 str(wxs),
115 '-dSourceDir=%s' % source_dir,
116 '-dSourceDir=%s' % source_dir,
116 ]
117 ]
117
118
118 if defines:
119 if defines:
119 args.extend('-d%s=%s' % define for define in sorted(defines.items()))
120 args.extend('-d%s=%s' % define for define in sorted(defines.items()))
120
121
121 subprocess.run(args, cwd=str(cwd), check=True)
122 subprocess.run(args, cwd=str(cwd), check=True)
122
123
123
124
124 def make_post_build_signing_fn(
125 name,
126 subject_name=None,
127 cert_path=None,
128 cert_password=None,
129 timestamp_url=None,
130 ):
131 """Create a callable that will use signtool to sign hg.exe."""
132
133 def post_build_sign(source_dir, build_dir, dist_dir, version):
134 description = '%s %s' % (name, version)
135
136 sign_with_signtool(
137 dist_dir / 'hg.exe',
138 description,
139 subject_name=subject_name,
140 cert_path=cert_path,
141 cert_password=cert_password,
142 timestamp_url=timestamp_url,
143 )
144
145 return post_build_sign
146
147
148 def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str:
125 def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str:
149 """Create XML string listing every file to be installed."""
126 """Create XML string listing every file to be installed."""
150
127
151 # We derive GUIDs from a deterministic file path identifier.
128 # We derive GUIDs from a deterministic file path identifier.
152 # We shoehorn the name into something that looks like a URL because
129 # We shoehorn the name into something that looks like a URL because
153 # the UUID namespaces are supposed to work that way (even though
130 # the UUID namespaces are supposed to work that way (even though
154 # the input data probably is never validated).
131 # the input data probably is never validated).
155
132
156 doc = xml.dom.minidom.parseString(
133 doc = xml.dom.minidom.parseString(
157 '<?xml version="1.0" encoding="utf-8"?>'
134 '<?xml version="1.0" encoding="utf-8"?>'
158 '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
135 '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
159 '</Wix>'
136 '</Wix>'
160 )
137 )
161
138
162 # Assemble the install layout by directory. This makes it easier to
139 # Assemble the install layout by directory. This makes it easier to
163 # emit XML, since each directory has separate entities.
140 # emit XML, since each directory has separate entities.
164 manifest = collections.defaultdict(dict)
141 manifest = collections.defaultdict(dict)
165
142
166 for root, dirs, files in os.walk(staging_dir):
143 for root, dirs, files in os.walk(staging_dir):
167 dirs.sort()
144 dirs.sort()
168
145
169 root = pathlib.Path(root)
146 root = pathlib.Path(root)
170 rel_dir = root.relative_to(staging_dir)
147 rel_dir = root.relative_to(staging_dir)
171
148
172 for i in range(len(rel_dir.parts)):
149 for i in range(len(rel_dir.parts)):
173 parent = '/'.join(rel_dir.parts[0 : i + 1])
150 parent = '/'.join(rel_dir.parts[0 : i + 1])
174 manifest.setdefault(parent, {})
151 manifest.setdefault(parent, {})
175
152
176 for f in sorted(files):
153 for f in sorted(files):
177 full = root / f
154 full = root / f
178 manifest[str(rel_dir).replace('\\', '/')][full.name] = full
155 manifest[str(rel_dir).replace('\\', '/')][full.name] = full
179
156
180 component_groups = collections.defaultdict(list)
157 component_groups = collections.defaultdict(list)
181
158
182 # Now emit a <Fragment> for each directory.
159 # Now emit a <Fragment> for each directory.
183 # Each directory is composed of a <DirectoryRef> pointing to its parent
160 # Each directory is composed of a <DirectoryRef> pointing to its parent
184 # and defines child <Directory>'s and a <Component> with all the files.
161 # and defines child <Directory>'s and a <Component> with all the files.
185 for dir_name, entries in sorted(manifest.items()):
162 for dir_name, entries in sorted(manifest.items()):
186 # The directory id is derived from the path. But the root directory
163 # The directory id is derived from the path. But the root directory
187 # is special.
164 # is special.
188 if dir_name == '.':
165 if dir_name == '.':
189 parent_directory_id = 'INSTALLDIR'
166 parent_directory_id = 'INSTALLDIR'
190 else:
167 else:
191 parent_directory_id = 'hg.dir.%s' % dir_name.replace('/', '.')
168 parent_directory_id = 'hg.dir.%s' % dir_name.replace('/', '.')
192
169
193 fragment = doc.createElement('Fragment')
170 fragment = doc.createElement('Fragment')
194 directory_ref = doc.createElement('DirectoryRef')
171 directory_ref = doc.createElement('DirectoryRef')
195 directory_ref.setAttribute('Id', parent_directory_id)
172 directory_ref.setAttribute('Id', parent_directory_id)
196
173
197 # Add <Directory> entries for immediate children directories.
174 # Add <Directory> entries for immediate children directories.
198 for possible_child in sorted(manifest.keys()):
175 for possible_child in sorted(manifest.keys()):
199 if (
176 if (
200 dir_name == '.'
177 dir_name == '.'
201 and '/' not in possible_child
178 and '/' not in possible_child
202 and possible_child != '.'
179 and possible_child != '.'
203 ):
180 ):
204 child_directory_id = 'hg.dir.%s' % possible_child
181 child_directory_id = 'hg.dir.%s' % possible_child
205 name = possible_child
182 name = possible_child
206 else:
183 else:
207 if not possible_child.startswith('%s/' % dir_name):
184 if not possible_child.startswith('%s/' % dir_name):
208 continue
185 continue
209 name = possible_child[len(dir_name) + 1 :]
186 name = possible_child[len(dir_name) + 1 :]
210 if '/' in name:
187 if '/' in name:
211 continue
188 continue
212
189
213 child_directory_id = 'hg.dir.%s' % possible_child.replace(
190 child_directory_id = 'hg.dir.%s' % possible_child.replace(
214 '/', '.'
191 '/', '.'
215 )
192 )
216
193
217 directory = doc.createElement('Directory')
194 directory = doc.createElement('Directory')
218 directory.setAttribute('Id', child_directory_id)
195 directory.setAttribute('Id', child_directory_id)
219 directory.setAttribute('Name', name)
196 directory.setAttribute('Name', name)
220 directory_ref.appendChild(directory)
197 directory_ref.appendChild(directory)
221
198
222 # Add <Component>s for files in this directory.
199 # Add <Component>s for files in this directory.
223 for rel, source_path in sorted(entries.items()):
200 for rel, source_path in sorted(entries.items()):
224 if dir_name == '.':
201 if dir_name == '.':
225 full_rel = rel
202 full_rel = rel
226 else:
203 else:
227 full_rel = '%s/%s' % (dir_name, rel)
204 full_rel = '%s/%s' % (dir_name, rel)
228
205
229 component_unique_id = (
206 component_unique_id = (
230 'https://www.mercurial-scm.org/wix-installer/0/component/%s'
207 'https://www.mercurial-scm.org/wix-installer/0/component/%s'
231 % full_rel
208 % full_rel
232 )
209 )
233 component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id)
210 component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id)
234 component_id = 'hg.component.%s' % str(component_guid).replace(
211 component_id = 'hg.component.%s' % str(component_guid).replace(
235 '-', '_'
212 '-', '_'
236 )
213 )
237
214
238 component = doc.createElement('Component')
215 component = doc.createElement('Component')
239
216
240 component.setAttribute('Id', component_id)
217 component.setAttribute('Id', component_id)
241 component.setAttribute('Guid', str(component_guid).upper())
218 component.setAttribute('Guid', str(component_guid).upper())
242 component.setAttribute('Win64', 'yes' if is_x64 else 'no')
219 component.setAttribute('Win64', 'yes' if is_x64 else 'no')
243
220
244 # Assign this component to a top-level group.
221 # Assign this component to a top-level group.
245 if dir_name == '.':
222 if dir_name == '.':
246 component_groups['ROOT'].append(component_id)
223 component_groups['ROOT'].append(component_id)
247 elif '/' in dir_name:
224 elif '/' in dir_name:
248 component_groups[dir_name[0 : dir_name.index('/')]].append(
225 component_groups[dir_name[0 : dir_name.index('/')]].append(
249 component_id
226 component_id
250 )
227 )
251 else:
228 else:
252 component_groups[dir_name].append(component_id)
229 component_groups[dir_name].append(component_id)
253
230
254 unique_id = (
231 unique_id = (
255 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel
232 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel
256 )
233 )
257 file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
234 file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
258
235
259 # IDs have length limits. So use GUID to derive them.
236 # IDs have length limits. So use GUID to derive them.
260 file_guid_normalized = str(file_guid).replace('-', '_')
237 file_guid_normalized = str(file_guid).replace('-', '_')
261 file_id = 'hg.file.%s' % file_guid_normalized
238 file_id = 'hg.file.%s' % file_guid_normalized
262
239
263 file_element = doc.createElement('File')
240 file_element = doc.createElement('File')
264 file_element.setAttribute('Id', file_id)
241 file_element.setAttribute('Id', file_id)
265 file_element.setAttribute('Source', str(source_path))
242 file_element.setAttribute('Source', str(source_path))
266 file_element.setAttribute('KeyPath', 'yes')
243 file_element.setAttribute('KeyPath', 'yes')
267 file_element.setAttribute('ReadOnly', 'yes')
244 file_element.setAttribute('ReadOnly', 'yes')
268
245
269 component.appendChild(file_element)
246 component.appendChild(file_element)
270 directory_ref.appendChild(component)
247 directory_ref.appendChild(component)
271
248
272 fragment.appendChild(directory_ref)
249 fragment.appendChild(directory_ref)
273 doc.documentElement.appendChild(fragment)
250 doc.documentElement.appendChild(fragment)
274
251
275 for group, component_ids in sorted(component_groups.items()):
252 for group, component_ids in sorted(component_groups.items()):
276 fragment = doc.createElement('Fragment')
253 fragment = doc.createElement('Fragment')
277 component_group = doc.createElement('ComponentGroup')
254 component_group = doc.createElement('ComponentGroup')
278 component_group.setAttribute('Id', 'hg.group.%s' % group)
255 component_group.setAttribute('Id', 'hg.group.%s' % group)
279
256
280 for component_id in component_ids:
257 for component_id in component_ids:
281 component_ref = doc.createElement('ComponentRef')
258 component_ref = doc.createElement('ComponentRef')
282 component_ref.setAttribute('Id', component_id)
259 component_ref.setAttribute('Id', component_id)
283 component_group.appendChild(component_ref)
260 component_group.appendChild(component_ref)
284
261
285 fragment.appendChild(component_group)
262 fragment.appendChild(component_group)
286 doc.documentElement.appendChild(fragment)
263 doc.documentElement.appendChild(fragment)
287
264
288 # Add <Shortcut> to files that have it defined.
265 # Add <Shortcut> to files that have it defined.
289 for file_id, metadata in sorted(SHORTCUTS.items()):
266 for file_id, metadata in sorted(SHORTCUTS.items()):
290 els = doc.getElementsByTagName('File')
267 els = doc.getElementsByTagName('File')
291 els = [el for el in els if el.getAttribute('Id') == file_id]
268 els = [el for el in els if el.getAttribute('Id') == file_id]
292
269
293 if not els:
270 if not els:
294 raise Exception('could not find File[Id=%s]' % file_id)
271 raise Exception('could not find File[Id=%s]' % file_id)
295
272
296 for el in els:
273 for el in els:
297 shortcut = doc.createElement('Shortcut')
274 shortcut = doc.createElement('Shortcut')
298 shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id)
275 shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id)
299 shortcut.setAttribute('Directory', 'ProgramMenuDir')
276 shortcut.setAttribute('Directory', 'ProgramMenuDir')
300 shortcut.setAttribute('Icon', 'hgIcon.ico')
277 shortcut.setAttribute('Icon', 'hgIcon.ico')
301 shortcut.setAttribute('IconIndex', '0')
278 shortcut.setAttribute('IconIndex', '0')
302 shortcut.setAttribute('Advertise', 'yes')
279 shortcut.setAttribute('Advertise', 'yes')
303 for k, v in sorted(metadata.items()):
280 for k, v in sorted(metadata.items()):
304 shortcut.setAttribute(k, v)
281 shortcut.setAttribute(k, v)
305
282
306 el.appendChild(shortcut)
283 el.appendChild(shortcut)
307
284
308 return doc.toprettyxml()
285 return doc.toprettyxml()
309
286
310
287
311 def build_installer(
288 def build_installer_py2exe(
312 source_dir: pathlib.Path,
289 source_dir: pathlib.Path,
313 python_exe: pathlib.Path,
290 python_exe: pathlib.Path,
314 msi_name='mercurial',
291 msi_name='mercurial',
315 version=None,
292 version=None,
316 post_build_fn=None,
317 extra_packages_script=None,
293 extra_packages_script=None,
318 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
294 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
319 extra_features: typing.Optional[typing.List[str]] = None,
295 extra_features: typing.Optional[typing.List[str]] = None,
296 signing_info: typing.Optional[typing.Dict[str, str]] = None,
320 ):
297 ):
321 """Build a WiX MSI installer.
298 """Build a WiX MSI installer using py2exe.
322
299
323 ``source_dir`` is the path to the Mercurial source tree to use.
300 ``source_dir`` is the path to the Mercurial source tree to use.
324 ``arch`` is the target architecture. either ``x86`` or ``x64``.
301 ``arch`` is the target architecture. either ``x86`` or ``x64``.
325 ``python_exe`` is the path to the Python executable to use/bundle.
302 ``python_exe`` is the path to the Python executable to use/bundle.
326 ``version`` is the Mercurial version string. If not defined,
303 ``version`` is the Mercurial version string. If not defined,
327 ``mercurial/__version__.py`` will be consulted.
304 ``mercurial/__version__.py`` will be consulted.
328 ``post_build_fn`` is a callable that will be called after building
329 Mercurial but before invoking WiX. It can be used to e.g. facilitate
330 signing. It is passed the paths to the Mercurial source, build, and
331 dist directories and the resolved Mercurial version.
332 ``extra_packages_script`` is a command to be run to inject extra packages
305 ``extra_packages_script`` is a command to be run to inject extra packages
333 into the py2exe binary. It should stage packages into the virtualenv and
306 into the py2exe binary. It should stage packages into the virtualenv and
334 print a null byte followed by a newline-separated list of packages that
307 print a null byte followed by a newline-separated list of packages that
335 should be included in the exe.
308 should be included in the exe.
336 ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}.
309 ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}.
337 ``extra_features`` is a list of additional named Features to include in
310 ``extra_features`` is a list of additional named Features to include in
338 the build. These must match Feature names in one of the wxs scripts.
311 the build. These must match Feature names in one of the wxs scripts.
339 """
312 """
340 arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86'
313 arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86'
341
314
342 hg_build_dir = source_dir / 'build'
315 hg_build_dir = source_dir / 'build'
343 dist_dir = source_dir / 'dist'
344 wix_dir = source_dir / 'contrib' / 'packaging' / 'wix'
345
316
346 requirements_txt = (
317 requirements_txt = (
347 source_dir / 'contrib' / 'packaging' / 'requirements_win32.txt'
318 source_dir / 'contrib' / 'packaging' / 'requirements_win32.txt'
348 )
319 )
349
320
350 build_py2exe(
321 build_py2exe(
351 source_dir,
322 source_dir,
352 hg_build_dir,
323 hg_build_dir,
353 python_exe,
324 python_exe,
354 'wix',
325 'wix',
355 requirements_txt,
326 requirements_txt,
356 extra_packages=EXTRA_PACKAGES,
327 extra_packages=EXTRA_PACKAGES,
357 extra_packages_script=extra_packages_script,
328 extra_packages_script=extra_packages_script,
358 )
329 )
359
330
360 orig_version = version or find_version(source_dir)
361 version = normalize_windows_version(orig_version)
362 print('using version string: %s' % version)
363 if version != orig_version:
364 print('(normalized from: %s)' % orig_version)
365
366 if post_build_fn:
367 post_build_fn(source_dir, hg_build_dir, dist_dir, version)
368
369 build_dir = hg_build_dir / ('wix-%s' % arch)
331 build_dir = hg_build_dir / ('wix-%s' % arch)
370 staging_dir = build_dir / 'stage'
332 staging_dir = build_dir / 'stage'
371
333
372 build_dir.mkdir(exist_ok=True)
334 build_dir.mkdir(exist_ok=True)
373
335
374 # Purge the staging directory for every build so packaging is pristine.
336 # Purge the staging directory for every build so packaging is pristine.
375 if staging_dir.exists():
337 if staging_dir.exists():
376 print('purging %s' % staging_dir)
338 print('purging %s' % staging_dir)
377 shutil.rmtree(staging_dir)
339 shutil.rmtree(staging_dir)
378
340
379 stage_install(source_dir, staging_dir, lower_case=True)
341 stage_install(source_dir, staging_dir, lower_case=True)
380
342
381 # We also install some extra files.
343 # We also install some extra files.
382 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
344 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
383
345
384 # And remove some files we don't want.
346 # And remove some files we don't want.
385 for f in STAGING_REMOVE_FILES:
347 for f in STAGING_REMOVE_FILES:
386 p = staging_dir / f
348 p = staging_dir / f
387 if p.exists():
349 if p.exists():
388 print('removing %s' % p)
350 print('removing %s' % p)
389 p.unlink()
351 p.unlink()
390
352
391 wix_pkg, wix_entry = download_entry('wix', hg_build_dir)
353 return run_wix_packaging(
392 wix_path = hg_build_dir / ('wix-%s' % wix_entry['version'])
354 source_dir,
355 build_dir,
356 staging_dir,
357 arch,
358 version=version,
359 python2=True,
360 msi_name=msi_name,
361 suffix="-python2",
362 extra_wxs=extra_wxs,
363 extra_features=extra_features,
364 signing_info=signing_info,
365 )
366
367
368 def build_installer_pyoxidizer(
369 source_dir: pathlib.Path,
370 target_triple: str,
371 msi_name='mercurial',
372 version=None,
373 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
374 extra_features: typing.Optional[typing.List[str]] = None,
375 signing_info: typing.Optional[typing.Dict[str, str]] = None,
376 ):
377 """Build a WiX MSI installer using PyOxidizer."""
378 hg_build_dir = source_dir / "build"
379 build_dir = hg_build_dir / ("wix-%s" % target_triple)
380 staging_dir = build_dir / "stage"
381
382 arch = "x64" if "x86_64" in target_triple else "x86"
383
384 build_dir.mkdir(parents=True, exist_ok=True)
385 run_pyoxidizer(source_dir, build_dir, staging_dir, target_triple)
386
387 # We also install some extra files.
388 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
389
390 # And remove some files we don't want.
391 for f in STAGING_REMOVE_FILES:
392 p = staging_dir / f
393 if p.exists():
394 print('removing %s' % p)
395 p.unlink()
396
397 return run_wix_packaging(
398 source_dir,
399 build_dir,
400 staging_dir,
401 arch,
402 version,
403 python2=False,
404 msi_name=msi_name,
405 extra_wxs=extra_wxs,
406 extra_features=extra_features,
407 signing_info=signing_info,
408 )
409
410
411 def run_wix_packaging(
412 source_dir: pathlib.Path,
413 build_dir: pathlib.Path,
414 staging_dir: pathlib.Path,
415 arch: str,
416 version: str,
417 python2: bool,
418 msi_name: typing.Optional[str] = "mercurial",
419 suffix: str = "",
420 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
421 extra_features: typing.Optional[typing.List[str]] = None,
422 signing_info: typing.Optional[typing.Dict[str, str]] = None,
423 ):
424 """Invokes WiX to package up a built Mercurial.
425
426 ``signing_info`` is a dict defining properties to facilitate signing the
427 installer. Recognized keys include ``name``, ``subject_name``,
428 ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated,
429 we will sign both the hg.exe and the .msi using the signing credentials
430 specified.
431 """
432
433 orig_version = version or find_version(source_dir)
434 version = normalize_windows_version(orig_version)
435 print('using version string: %s' % version)
436 if version != orig_version:
437 print('(normalized from: %s)' % orig_version)
438
439 if signing_info:
440 sign_with_signtool(
441 staging_dir / "hg.exe",
442 "%s %s" % (signing_info["name"], version),
443 subject_name=signing_info["subject_name"],
444 cert_path=signing_info["cert_path"],
445 cert_password=signing_info["cert_password"],
446 timestamp_url=signing_info["timestamp_url"],
447 )
448
449 wix_dir = source_dir / 'contrib' / 'packaging' / 'wix'
450
451 wix_pkg, wix_entry = download_entry('wix', build_dir)
452 wix_path = build_dir / ('wix-%s' % wix_entry['version'])
393
453
394 if not wix_path.exists():
454 if not wix_path.exists():
395 extract_zip_to_directory(wix_pkg, wix_path)
455 extract_zip_to_directory(wix_pkg, wix_path)
396
456
397 ensure_vc90_merge_modules(hg_build_dir)
457 if python2:
458 ensure_vc90_merge_modules(build_dir)
398
459
399 source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir))
460 source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir))
400
461
401 defines = {'Platform': arch}
462 defines = {'Platform': arch}
402
463
403 # Derive a .wxs file with the staged files.
464 # Derive a .wxs file with the staged files.
404 manifest_wxs = build_dir / 'stage.wxs'
465 manifest_wxs = build_dir / 'stage.wxs'
405 with manifest_wxs.open('w', encoding='utf-8') as fh:
466 with manifest_wxs.open('w', encoding='utf-8') as fh:
406 fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64'))
467 fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64'))
407
468
408 run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines)
469 run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines)
409
470
410 for source, rel_path in sorted((extra_wxs or {}).items()):
471 for source, rel_path in sorted((extra_wxs or {}).items()):
411 run_candle(wix_path, build_dir, source, rel_path, defines=defines)
472 run_candle(wix_path, build_dir, source, rel_path, defines=defines)
412
473
413 source = wix_dir / 'mercurial.wxs'
474 source = wix_dir / 'mercurial.wxs'
414 defines['Version'] = version
475 defines['Version'] = version
415 defines['Comments'] = 'Installs Mercurial version %s' % version
476 defines['Comments'] = 'Installs Mercurial version %s' % version
416 defines['VCRedistSrcDir'] = str(hg_build_dir)
477
478 if python2:
479 defines["PythonVersion"] = "2"
480 defines['VCRedistSrcDir'] = str(build_dir)
481 else:
482 defines["PythonVersion"] = "3"
483
484 if (staging_dir / "lib").exists():
485 defines["MercurialHasLib"] = "1"
486
417 if extra_features:
487 if extra_features:
418 assert all(';' not in f for f in extra_features)
488 assert all(';' not in f for f in extra_features)
419 defines['MercurialExtraFeatures'] = ';'.join(extra_features)
489 defines['MercurialExtraFeatures'] = ';'.join(extra_features)
420
490
421 run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
491 run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
422
492
423 msi_path = (
493 msi_path = (
424 source_dir / 'dist' / ('%s-%s-%s.msi' % (msi_name, orig_version, arch))
494 source_dir
495 / 'dist'
496 / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix))
425 )
497 )
426
498
427 args = [
499 args = [
428 str(wix_path / 'light.exe'),
500 str(wix_path / 'light.exe'),
429 '-nologo',
501 '-nologo',
430 '-ext',
502 '-ext',
431 'WixUIExtension',
503 'WixUIExtension',
432 '-sw1076',
504 '-sw1076',
433 '-spdb',
505 '-spdb',
434 '-o',
506 '-o',
435 str(msi_path),
507 str(msi_path),
436 ]
508 ]
437
509
438 for source, rel_path in sorted((extra_wxs or {}).items()):
510 for source, rel_path in sorted((extra_wxs or {}).items()):
439 assert source.endswith('.wxs')
511 assert source.endswith('.wxs')
440 source = os.path.basename(source)
512 source = os.path.basename(source)
441 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
513 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
442
514
443 args.extend(
515 args.extend(
444 [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
516 [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
445 )
517 )
446
518
447 subprocess.run(args, cwd=str(source_dir), check=True)
519 subprocess.run(args, cwd=str(source_dir), check=True)
448
520
449 print('%s created' % msi_path)
521 print('%s created' % msi_path)
450
522
523 if signing_info:
524 sign_with_signtool(
525 msi_path,
526 "%s %s" % (signing_info["name"], version),
527 subject_name=signing_info["subject_name"],
528 cert_path=signing_info["cert_path"],
529 cert_password=signing_info["cert_password"],
530 timestamp_url=signing_info["timestamp_url"],
531 )
532
451 return {
533 return {
452 'msi_path': msi_path,
534 'msi_path': msi_path,
453 }
535 }
454
455
456 def build_signed_installer(
457 source_dir: pathlib.Path,
458 python_exe: pathlib.Path,
459 name: str,
460 version=None,
461 subject_name=None,
462 cert_path=None,
463 cert_password=None,
464 timestamp_url=None,
465 extra_packages_script=None,
466 extra_wxs=None,
467 extra_features=None,
468 ):
469 """Build an installer with signed executables."""
470
471 post_build_fn = make_post_build_signing_fn(
472 name,
473 subject_name=subject_name,
474 cert_path=cert_path,
475 cert_password=cert_password,
476 timestamp_url=timestamp_url,
477 )
478
479 info = build_installer(
480 source_dir,
481 python_exe=python_exe,
482 msi_name=name.lower(),
483 version=version,
484 post_build_fn=post_build_fn,
485 extra_packages_script=extra_packages_script,
486 extra_wxs=extra_wxs,
487 extra_features=extra_features,
488 )
489
490 description = '%s %s' % (name, version)
491
492 sign_with_signtool(
493 info['msi_path'],
494 description,
495 subject_name=subject_name,
496 cert_path=cert_path,
497 cert_password=cert_password,
498 timestamp_url=timestamp_url,
499 )
@@ -1,83 +1,82 b''
1 ; Script generated by the Inno Setup Script Wizard.
1 ; Script generated by the Inno Setup Script Wizard.
2 ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
2 ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
3
3
4 #ifndef ARCH
4 #ifndef ARCH
5 #define ARCH = "x86"
5 #define ARCH = "x86"
6 #endif
6 #endif
7
7
8 [Setup]
8 [Setup]
9 AppCopyright=Copyright 2005-2020 Matt Mackall and others
9 AppCopyright=Copyright 2005-2020 Matt Mackall and others
10 AppName=Mercurial
10 AppName=Mercurial
11 AppVersion={#VERSION}
11 AppVersion={#VERSION}
12 OutputBaseFilename=Mercurial-{#VERSION}{#SUFFIX}
12 #if ARCH == "x64"
13 #if ARCH == "x64"
13 AppVerName=Mercurial {#VERSION} (64-bit)
14 AppVerName=Mercurial {#VERSION} (64-bit)
14 OutputBaseFilename=Mercurial-{#VERSION}-x64
15 ArchitecturesAllowed=x64
15 ArchitecturesAllowed=x64
16 ArchitecturesInstallIn64BitMode=x64
16 ArchitecturesInstallIn64BitMode=x64
17 #else
17 #else
18 AppVerName=Mercurial {#VERSION}
18 AppVerName=Mercurial {#VERSION}
19 OutputBaseFilename=Mercurial-{#VERSION}
20 #endif
19 #endif
21 InfoAfterFile=../postinstall.txt
20 InfoAfterFile=../postinstall.txt
22 LicenseFile=Copying.txt
21 LicenseFile=Copying.txt
23 ShowLanguageDialog=yes
22 ShowLanguageDialog=yes
24 AppPublisher=Matt Mackall and others
23 AppPublisher=Matt Mackall and others
25 AppPublisherURL=https://mercurial-scm.org/
24 AppPublisherURL=https://mercurial-scm.org/
26 AppSupportURL=https://mercurial-scm.org/
25 AppSupportURL=https://mercurial-scm.org/
27 AppUpdatesURL=https://mercurial-scm.org/
26 AppUpdatesURL=https://mercurial-scm.org/
28 {{ 'AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}' }}
27 {{ 'AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}' }}
29 AppContact=mercurial@mercurial-scm.org
28 AppContact=mercurial@mercurial-scm.org
30 DefaultDirName={pf}\Mercurial
29 DefaultDirName={pf}\Mercurial
31 SourceDir=stage
30 SourceDir=stage
32 VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
31 VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
33 VersionInfoCopyright=Copyright 2005-2020 Matt Mackall and others
32 VersionInfoCopyright=Copyright 2005-2020 Matt Mackall and others
34 VersionInfoCompany=Matt Mackall and others
33 VersionInfoCompany=Matt Mackall and others
35 VersionInfoVersion={#QUAD_VERSION}
34 VersionInfoVersion={#QUAD_VERSION}
36 InternalCompressLevel=max
35 InternalCompressLevel=max
37 SolidCompression=true
36 SolidCompression=true
38 SetupIconFile=../mercurial.ico
37 SetupIconFile=../mercurial.ico
39 AllowNoIcons=true
38 AllowNoIcons=true
40 DefaultGroupName=Mercurial
39 DefaultGroupName=Mercurial
41 PrivilegesRequired=none
40 PrivilegesRequired=none
42 ChangesEnvironment=true
41 ChangesEnvironment=true
43
42
44 [Files]
43 [Files]
45 {% for entry in package_files -%}
44 {% for entry in package_files -%}
46 Source: {{ entry.source }}; DestDir: {{ entry.dest_dir }}
45 Source: {{ entry.source }}; DestDir: {{ entry.dest_dir }}
47 {%- if entry.metadata %}; {{ entry.metadata }}{% endif %}
46 {%- if entry.metadata %}; {{ entry.metadata }}{% endif %}
48 {% endfor %}
47 {% endfor %}
49
48
50 [INI]
49 [INI]
51 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/
50 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/
52
51
53 [UninstallDelete]
52 [UninstallDelete]
54 Type: files; Name: {app}\Mercurial.url
53 Type: files; Name: {app}\Mercurial.url
55 Type: filesandordirs; Name: {app}\defaultrc
54 Type: filesandordirs; Name: {app}\defaultrc
56
55
57 [Icons]
56 [Icons]
58 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
57 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
59 Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.html
58 Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.html
60 Name: {group}\Mercurial Configuration Files; Filename: {app}\Docs\hgrc.5.html
59 Name: {group}\Mercurial Configuration Files; Filename: {app}\Docs\hgrc.5.html
61 Name: {group}\Mercurial Ignore Files; Filename: {app}\Docs\hgignore.5.html
60 Name: {group}\Mercurial Ignore Files; Filename: {app}\Docs\hgignore.5.html
62 Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url
61 Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url
63
62
64 [Tasks]
63 [Tasks]
65 Name: modifypath; Description: Add the installation path to the search path; Flags: unchecked
64 Name: modifypath; Description: Add the installation path to the search path; Flags: unchecked
66
65
67 [Code]
66 [Code]
68 procedure Touch(fn: String);
67 procedure Touch(fn: String);
69 begin
68 begin
70 SaveStringToFile(ExpandConstant(fn), '', False);
69 SaveStringToFile(ExpandConstant(fn), '', False);
71 end;
70 end;
72
71
73 const
72 const
74 ModPathName = 'modifypath';
73 ModPathName = 'modifypath';
75 ModPathType = 'user';
74 ModPathType = 'user';
76
75
77 function ModPathDir(): TArrayOfString;
76 function ModPathDir(): TArrayOfString;
78 begin
77 begin
79 setArrayLength(Result, 1)
78 setArrayLength(Result, 1)
80 Result[0] := ExpandConstant('{app}');
79 Result[0] := ExpandConstant('{app}');
81 end;
80 end;
82
81
83 {% include 'modpath.iss' %}
82 {% include 'modpath.iss' %}
@@ -1,143 +1,152 b''
1 <?xml version='1.0' encoding='windows-1252'?>
1 <?xml version='1.0' encoding='windows-1252'?>
2 <Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'>
2 <Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'>
3
3
4 <!-- Copyright 2010 Steve Borho <steve@borho.org>
4 <!-- Copyright 2010 Steve Borho <steve@borho.org>
5
5
6 This software may be used and distributed according to the terms of the
6 This software may be used and distributed according to the terms of the
7 GNU General Public License version 2 or any later version. -->
7 GNU General Public License version 2 or any later version. -->
8
8
9 <?include guids.wxi ?>
9 <?include guids.wxi ?>
10 <?include defines.wxi ?>
10 <?include defines.wxi ?>
11
11
12 <?if $(var.Platform) = "x64" ?>
12 <?if $(var.Platform) = "x64" ?>
13 <?define PFolder = ProgramFiles64Folder ?>
13 <?define PFolder = ProgramFiles64Folder ?>
14 <?else?>
14 <?else?>
15 <?define PFolder = ProgramFilesFolder ?>
15 <?define PFolder = ProgramFilesFolder ?>
16 <?endif?>
16 <?endif?>
17
17
18 <Product Id='*'
18 <Product Id='*'
19 Name='Mercurial $(var.Version) ($(var.Platform))'
19 Name='Mercurial $(var.Version) ($(var.Platform))'
20 UpgradeCode='$(var.ProductUpgradeCode)'
20 UpgradeCode='$(var.ProductUpgradeCode)'
21 Language='1033' Codepage='1252' Version='$(var.Version)'
21 Language='1033' Codepage='1252' Version='$(var.Version)'
22 Manufacturer='Matt Mackall and others'>
22 Manufacturer='Matt Mackall and others'>
23
23
24 <Package Id='*'
24 <Package Id='*'
25 Keywords='Installer'
25 Keywords='Installer'
26 Description="Mercurial distributed SCM (version $(var.Version))"
26 Description="Mercurial distributed SCM (version $(var.Version))"
27 Comments='$(var.Comments)'
27 Comments='$(var.Comments)'
28 Platform='$(var.Platform)'
28 Platform='$(var.Platform)'
29 Manufacturer='Matt Mackall and others'
29 Manufacturer='Matt Mackall and others'
30 InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' />
30 InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' />
31
31
32 <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1'
32 <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1'
33 CompressionLevel='high' />
33 CompressionLevel='high' />
34 <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" />
34 <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" />
35
35
36 <Condition Message='Mercurial MSI installers require Windows XP or higher'>
36 <Condition Message='Mercurial MSI installers require Windows XP or higher'>
37 VersionNT >= 501
37 VersionNT >= 501
38 </Condition>
38 </Condition>
39
39
40 <Property Id="INSTALLDIR">
40 <Property Id="INSTALLDIR">
41 <ComponentSearch Id='SearchForMainExecutableComponent'
41 <ComponentSearch Id='SearchForMainExecutableComponent'
42 Guid='$(var.ComponentMainExecutableGUID)' />
42 Guid='$(var.ComponentMainExecutableGUID)' />
43 </Property>
43 </Property>
44
44
45 <!--Property Id='ARPCOMMENTS'>any comments</Property-->
45 <!--Property Id='ARPCOMMENTS'>any comments</Property-->
46 <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property>
46 <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property>
47 <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property>
47 <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property>
48 <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property>
48 <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property>
49 <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property>
49 <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property>
50 <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property>
50 <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property>
51 <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property>
51 <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property>
52
52
53 <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property>
53 <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property>
54 <Property Id='REINSTALLMODE'>amus</Property>
54 <Property Id='REINSTALLMODE'>amus</Property>
55
55
56 <!--Auto-accept the license page-->
56 <!--Auto-accept the license page-->
57 <Property Id='LicenseAccepted'>1</Property>
57 <Property Id='LicenseAccepted'>1</Property>
58
58
59 <Directory Id='TARGETDIR' Name='SourceDir'>
59 <Directory Id='TARGETDIR' Name='SourceDir'>
60 <Directory Id='$(var.PFolder)' Name='PFiles'>
60 <Directory Id='$(var.PFolder)' Name='PFiles'>
61 <Directory Id='INSTALLDIR' Name='Mercurial'>
61 <Directory Id='INSTALLDIR' Name='Mercurial'>
62 <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'>
62 <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'>
63 <CreateFolder />
63 <CreateFolder />
64 <Environment Id="Environment" Name="PATH" Part="last" System="yes"
64 <Environment Id="Environment" Name="PATH" Part="last" System="yes"
65 Permanent="no" Value="[INSTALLDIR]" Action="set" />
65 Permanent="no" Value="[INSTALLDIR]" Action="set" />
66 </Component>
66 </Component>
67 </Directory>
67 </Directory>
68 </Directory>
68 </Directory>
69
69
70 <Directory Id="ProgramMenuFolder" Name="Programs">
70 <Directory Id="ProgramMenuFolder" Name="Programs">
71 <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)">
71 <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)">
72 <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'>
72 <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'>
73 <RemoveFolder Id='ProgramMenuDir' On='uninstall' />
73 <RemoveFolder Id='ProgramMenuDir' On='uninstall' />
74 <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' Type='string'
74 <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' Type='string'
75 Value='[INSTALLDIR]' KeyPath='yes' />
75 Value='[INSTALLDIR]' KeyPath='yes' />
76 <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site'
76 <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site'
77 Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' />
77 Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' />
78 </Component>
78 </Component>
79 </Directory>
79 </Directory>
80 </Directory>
80 </Directory>
81
81
82 <?if $(var.Platform) = "x86" ?>
82 <!-- Install VCRedist merge modules on Python 2. On Python 3,
83 <Merge Id='VCRuntime' DiskId='1' Language='1033'
83 vcruntimeXXX.dll is part of the install layout and gets picked up
84 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' />
84 as a regular file. -->
85 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
85 <?if $(var.PythonVersion) = "2" ?>
86 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' />
86 <?if $(var.Platform) = "x86" ?>
87 <?else?>
87 <Merge Id='VCRuntime' DiskId='1' Language='1033'
88 <Merge Id='VCRuntime' DiskId='1' Language='1033'
88 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' />
89 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' />
89 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
90 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
90 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' />
91 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' />
91 <?else?>
92 <Merge Id='VCRuntime' DiskId='1' Language='1033'
93 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' />
94 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
95 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' />
96 <?endif?>
92 <?endif?>
97 <?endif?>
93 </Directory>
98 </Directory>
94
99
95 <Feature Id='Complete' Title='Mercurial' Description='The complete package'
100 <Feature Id='Complete' Title='Mercurial' Description='The complete package'
96 Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' >
101 Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' >
97 <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app'
102 <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app'
98 Level='1' Absent='disallow' >
103 Level='1' Absent='disallow' >
99 <ComponentRef Id='MainExecutable' />
104 <ComponentRef Id='MainExecutable' />
100 <ComponentRef Id='ProgramMenuDir' />
105 <ComponentRef Id='ProgramMenuDir' />
101 <ComponentGroupRef Id="hg.group.ROOT" />
106 <ComponentGroupRef Id="hg.group.ROOT" />
102 <ComponentGroupRef Id="hg.group.defaultrc" />
107 <ComponentGroupRef Id="hg.group.defaultrc" />
103 <ComponentGroupRef Id="hg.group.helptext" />
108 <ComponentGroupRef Id="hg.group.helptext" />
104 <ComponentGroupRef Id="hg.group.lib" />
109 <?ifdef MercurialHasLib?>
110 <ComponentGroupRef Id="hg.group.lib" />
111 <?endif?>
105 <ComponentGroupRef Id="hg.group.templates" />
112 <ComponentGroupRef Id="hg.group.templates" />
106 <MergeRef Id='VCRuntime' />
113 <?if $(var.PythonVersion) = "2" ?>
107 <MergeRef Id='VCRuntimePolicy' />
114 <MergeRef Id='VCRuntime' />
115 <MergeRef Id='VCRuntimePolicy' />
116 <?endif?>
108 </Feature>
117 </Feature>
109 <?ifdef MercurialExtraFeatures?>
118 <?ifdef MercurialExtraFeatures?>
110 <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?>
119 <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?>
111 <FeatureRef Id="$(var.EXTRAFEAT)" />
120 <FeatureRef Id="$(var.EXTRAFEAT)" />
112 <?endforeach?>
121 <?endforeach?>
113 <?endif?>
122 <?endif?>
114 <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'>
123 <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'>
115 <ComponentGroupRef Id="hg.group.locale" />
124 <ComponentGroupRef Id="hg.group.locale" />
116 </Feature>
125 </Feature>
117 <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'>
126 <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'>
118 <ComponentGroupRef Id="hg.group.doc" />
127 <ComponentGroupRef Id="hg.group.doc" />
119 </Feature>
128 </Feature>
120 <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'>
129 <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'>
121 <ComponentGroupRef Id="hg.group.contrib" />
130 <ComponentGroupRef Id="hg.group.contrib" />
122 </Feature>
131 </Feature>
123 </Feature>
132 </Feature>
124
133
125 <UIRef Id="WixUI_FeatureTree" />
134 <UIRef Id="WixUI_FeatureTree" />
126 <UIRef Id="WixUI_ErrorProgressText" />
135 <UIRef Id="WixUI_ErrorProgressText" />
127
136
128 <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" />
137 <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" />
129
138
130 <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" />
139 <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" />
131
140
132 <Upgrade Id='$(var.ProductUpgradeCode)'>
141 <Upgrade Id='$(var.ProductUpgradeCode)'>
133 <UpgradeVersion
142 <UpgradeVersion
134 IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no'
143 IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no'
135 Property='INSTALLEDMERCURIALPRODUCTS' />
144 Property='INSTALLEDMERCURIALPRODUCTS' />
136 </Upgrade>
145 </Upgrade>
137
146
138 <InstallExecuteSequence>
147 <InstallExecuteSequence>
139 <RemoveExistingProducts After='InstallInitialize'/>
148 <RemoveExistingProducts After='InstallInitialize'/>
140 </InstallExecuteSequence>
149 </InstallExecuteSequence>
141
150
142 </Product>
151 </Product>
143 </Wix>
152 </Wix>
@@ -1,3227 +1,3227 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import, print_function
9 from __future__ import absolute_import, print_function
10
10
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import copy
13 import copy
14 import errno
14 import errno
15 import os
15 import os
16 import re
16 import re
17 import shutil
17 import shutil
18 import zlib
18 import zlib
19
19
20 from .i18n import _
20 from .i18n import _
21 from .node import (
21 from .node import (
22 hex,
22 hex,
23 short,
23 short,
24 )
24 )
25 from .pycompat import open
25 from .pycompat import open
26 from . import (
26 from . import (
27 copies,
27 copies,
28 diffhelper,
28 diffhelper,
29 diffutil,
29 diffutil,
30 encoding,
30 encoding,
31 error,
31 error,
32 mail,
32 mail,
33 mdiff,
33 mdiff,
34 pathutil,
34 pathutil,
35 pycompat,
35 pycompat,
36 scmutil,
36 scmutil,
37 similar,
37 similar,
38 util,
38 util,
39 vfs as vfsmod,
39 vfs as vfsmod,
40 )
40 )
41 from .utils import (
41 from .utils import (
42 dateutil,
42 dateutil,
43 hashutil,
43 hashutil,
44 procutil,
44 procutil,
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48 stringio = util.stringio
48 stringio = util.stringio
49
49
50 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
50 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
51 tabsplitter = re.compile(br'(\t+|[^\t]+)')
51 tabsplitter = re.compile(br'(\t+|[^\t]+)')
52 wordsplitter = re.compile(
52 wordsplitter = re.compile(
53 br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])'
53 br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])'
54 )
54 )
55
55
56 PatchError = error.PatchError
56 PatchError = error.PatchError
57
57
58 # public functions
58 # public functions
59
59
60
60
61 def split(stream):
61 def split(stream):
62 '''return an iterator of individual patches from a stream'''
62 '''return an iterator of individual patches from a stream'''
63
63
64 def isheader(line, inheader):
64 def isheader(line, inheader):
65 if inheader and line.startswith((b' ', b'\t')):
65 if inheader and line.startswith((b' ', b'\t')):
66 # continuation
66 # continuation
67 return True
67 return True
68 if line.startswith((b' ', b'-', b'+')):
68 if line.startswith((b' ', b'-', b'+')):
69 # diff line - don't check for header pattern in there
69 # diff line - don't check for header pattern in there
70 return False
70 return False
71 l = line.split(b': ', 1)
71 l = line.split(b': ', 1)
72 return len(l) == 2 and b' ' not in l[0]
72 return len(l) == 2 and b' ' not in l[0]
73
73
74 def chunk(lines):
74 def chunk(lines):
75 return stringio(b''.join(lines))
75 return stringio(b''.join(lines))
76
76
77 def hgsplit(stream, cur):
77 def hgsplit(stream, cur):
78 inheader = True
78 inheader = True
79
79
80 for line in stream:
80 for line in stream:
81 if not line.strip():
81 if not line.strip():
82 inheader = False
82 inheader = False
83 if not inheader and line.startswith(b'# HG changeset patch'):
83 if not inheader and line.startswith(b'# HG changeset patch'):
84 yield chunk(cur)
84 yield chunk(cur)
85 cur = []
85 cur = []
86 inheader = True
86 inheader = True
87
87
88 cur.append(line)
88 cur.append(line)
89
89
90 if cur:
90 if cur:
91 yield chunk(cur)
91 yield chunk(cur)
92
92
93 def mboxsplit(stream, cur):
93 def mboxsplit(stream, cur):
94 for line in stream:
94 for line in stream:
95 if line.startswith(b'From '):
95 if line.startswith(b'From '):
96 for c in split(chunk(cur[1:])):
96 for c in split(chunk(cur[1:])):
97 yield c
97 yield c
98 cur = []
98 cur = []
99
99
100 cur.append(line)
100 cur.append(line)
101
101
102 if cur:
102 if cur:
103 for c in split(chunk(cur[1:])):
103 for c in split(chunk(cur[1:])):
104 yield c
104 yield c
105
105
106 def mimesplit(stream, cur):
106 def mimesplit(stream, cur):
107 def msgfp(m):
107 def msgfp(m):
108 fp = stringio()
108 fp = stringio()
109 g = mail.Generator(fp, mangle_from_=False)
109 g = mail.Generator(fp, mangle_from_=False)
110 g.flatten(m)
110 g.flatten(m)
111 fp.seek(0)
111 fp.seek(0)
112 return fp
112 return fp
113
113
114 for line in stream:
114 for line in stream:
115 cur.append(line)
115 cur.append(line)
116 c = chunk(cur)
116 c = chunk(cur)
117
117
118 m = mail.parse(c)
118 m = mail.parse(c)
119 if not m.is_multipart():
119 if not m.is_multipart():
120 yield msgfp(m)
120 yield msgfp(m)
121 else:
121 else:
122 ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
122 ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
123 for part in m.walk():
123 for part in m.walk():
124 ct = part.get_content_type()
124 ct = part.get_content_type()
125 if ct not in ok_types:
125 if ct not in ok_types:
126 continue
126 continue
127 yield msgfp(part)
127 yield msgfp(part)
128
128
129 def headersplit(stream, cur):
129 def headersplit(stream, cur):
130 inheader = False
130 inheader = False
131
131
132 for line in stream:
132 for line in stream:
133 if not inheader and isheader(line, inheader):
133 if not inheader and isheader(line, inheader):
134 yield chunk(cur)
134 yield chunk(cur)
135 cur = []
135 cur = []
136 inheader = True
136 inheader = True
137 if inheader and not isheader(line, inheader):
137 if inheader and not isheader(line, inheader):
138 inheader = False
138 inheader = False
139
139
140 cur.append(line)
140 cur.append(line)
141
141
142 if cur:
142 if cur:
143 yield chunk(cur)
143 yield chunk(cur)
144
144
145 def remainder(cur):
145 def remainder(cur):
146 yield chunk(cur)
146 yield chunk(cur)
147
147
148 class fiter(object):
148 class fiter(object):
149 def __init__(self, fp):
149 def __init__(self, fp):
150 self.fp = fp
150 self.fp = fp
151
151
152 def __iter__(self):
152 def __iter__(self):
153 return self
153 return self
154
154
155 def next(self):
155 def next(self):
156 l = self.fp.readline()
156 l = self.fp.readline()
157 if not l:
157 if not l:
158 raise StopIteration
158 raise StopIteration
159 return l
159 return l
160
160
161 __next__ = next
161 __next__ = next
162
162
163 inheader = False
163 inheader = False
164 cur = []
164 cur = []
165
165
166 mimeheaders = [b'content-type']
166 mimeheaders = [b'content-type']
167
167
168 if not util.safehasattr(stream, b'next'):
168 if not util.safehasattr(stream, b'next'):
169 # http responses, for example, have readline but not next
169 # http responses, for example, have readline but not next
170 stream = fiter(stream)
170 stream = fiter(stream)
171
171
172 for line in stream:
172 for line in stream:
173 cur.append(line)
173 cur.append(line)
174 if line.startswith(b'# HG changeset patch'):
174 if line.startswith(b'# HG changeset patch'):
175 return hgsplit(stream, cur)
175 return hgsplit(stream, cur)
176 elif line.startswith(b'From '):
176 elif line.startswith(b'From '):
177 return mboxsplit(stream, cur)
177 return mboxsplit(stream, cur)
178 elif isheader(line, inheader):
178 elif isheader(line, inheader):
179 inheader = True
179 inheader = True
180 if line.split(b':', 1)[0].lower() in mimeheaders:
180 if line.split(b':', 1)[0].lower() in mimeheaders:
181 # let email parser handle this
181 # let email parser handle this
182 return mimesplit(stream, cur)
182 return mimesplit(stream, cur)
183 elif line.startswith(b'--- ') and inheader:
183 elif line.startswith(b'--- ') and inheader:
184 # No evil headers seen by diff start, split by hand
184 # No evil headers seen by diff start, split by hand
185 return headersplit(stream, cur)
185 return headersplit(stream, cur)
186 # Not enough info, keep reading
186 # Not enough info, keep reading
187
187
188 # if we are here, we have a very plain patch
188 # if we are here, we have a very plain patch
189 return remainder(cur)
189 return remainder(cur)
190
190
191
191
192 ## Some facility for extensible patch parsing:
192 ## Some facility for extensible patch parsing:
193 # list of pairs ("header to match", "data key")
193 # list of pairs ("header to match", "data key")
194 patchheadermap = [
194 patchheadermap = [
195 (b'Date', b'date'),
195 (b'Date', b'date'),
196 (b'Branch', b'branch'),
196 (b'Branch', b'branch'),
197 (b'Node ID', b'nodeid'),
197 (b'Node ID', b'nodeid'),
198 ]
198 ]
199
199
200
200
201 @contextlib.contextmanager
201 @contextlib.contextmanager
202 def extract(ui, fileobj):
202 def extract(ui, fileobj):
203 '''extract patch from data read from fileobj.
203 '''extract patch from data read from fileobj.
204
204
205 patch can be a normal patch or contained in an email message.
205 patch can be a normal patch or contained in an email message.
206
206
207 return a dictionary. Standard keys are:
207 return a dictionary. Standard keys are:
208 - filename,
208 - filename,
209 - message,
209 - message,
210 - user,
210 - user,
211 - date,
211 - date,
212 - branch,
212 - branch,
213 - node,
213 - node,
214 - p1,
214 - p1,
215 - p2.
215 - p2.
216 Any item can be missing from the dictionary. If filename is missing,
216 Any item can be missing from the dictionary. If filename is missing,
217 fileobj did not contain a patch. Caller must unlink filename when done.'''
217 fileobj did not contain a patch. Caller must unlink filename when done.'''
218
218
219 fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
219 fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
220 tmpfp = os.fdopen(fd, 'wb')
220 tmpfp = os.fdopen(fd, 'wb')
221 try:
221 try:
222 yield _extract(ui, fileobj, tmpname, tmpfp)
222 yield _extract(ui, fileobj, tmpname, tmpfp)
223 finally:
223 finally:
224 tmpfp.close()
224 tmpfp.close()
225 os.unlink(tmpname)
225 os.unlink(tmpname)
226
226
227
227
228 def _extract(ui, fileobj, tmpname, tmpfp):
228 def _extract(ui, fileobj, tmpname, tmpfp):
229
229
230 # attempt to detect the start of a patch
230 # attempt to detect the start of a patch
231 # (this heuristic is borrowed from quilt)
231 # (this heuristic is borrowed from quilt)
232 diffre = re.compile(
232 diffre = re.compile(
233 br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
233 br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
234 br'retrieving revision [0-9]+(\.[0-9]+)*$|'
234 br'retrieving revision [0-9]+(\.[0-9]+)*$|'
235 br'---[ \t].*?^\+\+\+[ \t]|'
235 br'---[ \t].*?^\+\+\+[ \t]|'
236 br'\*\*\*[ \t].*?^---[ \t])',
236 br'\*\*\*[ \t].*?^---[ \t])',
237 re.MULTILINE | re.DOTALL,
237 re.MULTILINE | re.DOTALL,
238 )
238 )
239
239
240 data = {}
240 data = {}
241
241
242 msg = mail.parse(fileobj)
242 msg = mail.parse(fileobj)
243
243
244 subject = msg['Subject'] and mail.headdecode(msg['Subject'])
244 subject = msg['Subject'] and mail.headdecode(msg['Subject'])
245 data[b'user'] = msg['From'] and mail.headdecode(msg['From'])
245 data[b'user'] = msg['From'] and mail.headdecode(msg['From'])
246 if not subject and not data[b'user']:
246 if not subject and not data[b'user']:
247 # Not an email, restore parsed headers if any
247 # Not an email, restore parsed headers if any
248 subject = (
248 subject = (
249 b'\n'.join(
249 b'\n'.join(
250 b': '.join(map(encoding.strtolocal, h)) for h in msg.items()
250 b': '.join(map(encoding.strtolocal, h)) for h in msg.items()
251 )
251 )
252 + b'\n'
252 + b'\n'
253 )
253 )
254
254
255 # should try to parse msg['Date']
255 # should try to parse msg['Date']
256 parents = []
256 parents = []
257
257
258 nodeid = msg['X-Mercurial-Node']
258 nodeid = msg['X-Mercurial-Node']
259 if nodeid:
259 if nodeid:
260 data[b'nodeid'] = nodeid = mail.headdecode(nodeid)
260 data[b'nodeid'] = nodeid = mail.headdecode(nodeid)
261 ui.debug(b'Node ID: %s\n' % nodeid)
261 ui.debug(b'Node ID: %s\n' % nodeid)
262
262
263 if subject:
263 if subject:
264 if subject.startswith(b'[PATCH'):
264 if subject.startswith(b'[PATCH'):
265 pend = subject.find(b']')
265 pend = subject.find(b']')
266 if pend >= 0:
266 if pend >= 0:
267 subject = subject[pend + 1 :].lstrip()
267 subject = subject[pend + 1 :].lstrip()
268 subject = re.sub(br'\n[ \t]+', b' ', subject)
268 subject = re.sub(br'\n[ \t]+', b' ', subject)
269 ui.debug(b'Subject: %s\n' % subject)
269 ui.debug(b'Subject: %s\n' % subject)
270 if data[b'user']:
270 if data[b'user']:
271 ui.debug(b'From: %s\n' % data[b'user'])
271 ui.debug(b'From: %s\n' % data[b'user'])
272 diffs_seen = 0
272 diffs_seen = 0
273 ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
273 ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
274 message = b''
274 message = b''
275 for part in msg.walk():
275 for part in msg.walk():
276 content_type = pycompat.bytestr(part.get_content_type())
276 content_type = pycompat.bytestr(part.get_content_type())
277 ui.debug(b'Content-Type: %s\n' % content_type)
277 ui.debug(b'Content-Type: %s\n' % content_type)
278 if content_type not in ok_types:
278 if content_type not in ok_types:
279 continue
279 continue
280 payload = part.get_payload(decode=True)
280 payload = part.get_payload(decode=True)
281 m = diffre.search(payload)
281 m = diffre.search(payload)
282 if m:
282 if m:
283 hgpatch = False
283 hgpatch = False
284 hgpatchheader = False
284 hgpatchheader = False
285 ignoretext = False
285 ignoretext = False
286
286
287 ui.debug(b'found patch at byte %d\n' % m.start(0))
287 ui.debug(b'found patch at byte %d\n' % m.start(0))
288 diffs_seen += 1
288 diffs_seen += 1
289 cfp = stringio()
289 cfp = stringio()
290 for line in payload[: m.start(0)].splitlines():
290 for line in payload[: m.start(0)].splitlines():
291 if line.startswith(b'# HG changeset patch') and not hgpatch:
291 if line.startswith(b'# HG changeset patch') and not hgpatch:
292 ui.debug(b'patch generated by hg export\n')
292 ui.debug(b'patch generated by hg export\n')
293 hgpatch = True
293 hgpatch = True
294 hgpatchheader = True
294 hgpatchheader = True
295 # drop earlier commit message content
295 # drop earlier commit message content
296 cfp.seek(0)
296 cfp.seek(0)
297 cfp.truncate()
297 cfp.truncate()
298 subject = None
298 subject = None
299 elif hgpatchheader:
299 elif hgpatchheader:
300 if line.startswith(b'# User '):
300 if line.startswith(b'# User '):
301 data[b'user'] = line[7:]
301 data[b'user'] = line[7:]
302 ui.debug(b'From: %s\n' % data[b'user'])
302 ui.debug(b'From: %s\n' % data[b'user'])
303 elif line.startswith(b"# Parent "):
303 elif line.startswith(b"# Parent "):
304 parents.append(line[9:].lstrip())
304 parents.append(line[9:].lstrip())
305 elif line.startswith(b"# "):
305 elif line.startswith(b"# "):
306 for header, key in patchheadermap:
306 for header, key in patchheadermap:
307 prefix = b'# %s ' % header
307 prefix = b'# %s ' % header
308 if line.startswith(prefix):
308 if line.startswith(prefix):
309 data[key] = line[len(prefix) :]
309 data[key] = line[len(prefix) :]
310 ui.debug(b'%s: %s\n' % (header, data[key]))
310 ui.debug(b'%s: %s\n' % (header, data[key]))
311 else:
311 else:
312 hgpatchheader = False
312 hgpatchheader = False
313 elif line == b'---':
313 elif line == b'---':
314 ignoretext = True
314 ignoretext = True
315 if not hgpatchheader and not ignoretext:
315 if not hgpatchheader and not ignoretext:
316 cfp.write(line)
316 cfp.write(line)
317 cfp.write(b'\n')
317 cfp.write(b'\n')
318 message = cfp.getvalue()
318 message = cfp.getvalue()
319 if tmpfp:
319 if tmpfp:
320 tmpfp.write(payload)
320 tmpfp.write(payload)
321 if not payload.endswith(b'\n'):
321 if not payload.endswith(b'\n'):
322 tmpfp.write(b'\n')
322 tmpfp.write(b'\n')
323 elif not diffs_seen and message and content_type == b'text/plain':
323 elif not diffs_seen and message and content_type == b'text/plain':
324 message += b'\n' + payload
324 message += b'\n' + payload
325
325
326 if subject and not message.startswith(subject):
326 if subject and not message.startswith(subject):
327 message = b'%s\n%s' % (subject, message)
327 message = b'%s\n%s' % (subject, message)
328 data[b'message'] = message
328 data[b'message'] = message
329 tmpfp.close()
329 tmpfp.close()
330 if parents:
330 if parents:
331 data[b'p1'] = parents.pop(0)
331 data[b'p1'] = parents.pop(0)
332 if parents:
332 if parents:
333 data[b'p2'] = parents.pop(0)
333 data[b'p2'] = parents.pop(0)
334
334
335 if diffs_seen:
335 if diffs_seen:
336 data[b'filename'] = tmpname
336 data[b'filename'] = tmpname
337
337
338 return data
338 return data
339
339
340
340
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY.  'path' is the patched file path.  'oldpath' names the
    origin file when 'op' is COPY or RENAME and is None otherwise.
    When the file mode changes, 'mode' is an (islink, isexec) pair;
    otherwise it is None.
    """

    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = b'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # Split a numeric st_mode into the (islink, isexec) pair.
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        # Duplicate carrying over every metadata field.
        dup = patchmeta(self.path)
        dup.oldpath = self.oldpath
        dup.mode = self.mode
        dup.op = self.op
        dup.binary = self.binary
        return dup

    def _ispatchinga(self, afile):
        # The "a/" side is /dev/null exactly for additions.
        if afile == b'/dev/null':
            return self.op == b'ADD'
        return afile == b'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        # The "b/" side is /dev/null exactly for deletions.
        if bfile == b'/dev/null':
            return self.op == b'DELETE'
        return bfile == b'b/' + self.path

    def ispatching(self, afile, bfile):
        # Both diff sides must agree with this file's metadata.
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
387
387
388
388
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Walk the stream looking for "diff --git" headers; each one opens
    # a new patchmeta that subsequent extended-header lines refine.
    gitpatches = []
    gp = None
    for line in lr:
        line = line.rstrip(b' \r\n')
        if line.startswith(b'diff --git a/'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                gp = patchmeta(m.group(2))
        elif gp:
            if line.startswith(b'--- '):
                # Hunks begin here: the extended header is complete.
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith(b'rename from '):
                gp.op = b'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith(b'rename to '):
                gp.path = line[10:]
            elif line.startswith(b'copy from '):
                gp.op = b'COPY'
                gp.oldpath = line[10:]
            elif line.startswith(b'copy to '):
                gp.path = line[8:]
            elif line.startswith(b'deleted file'):
                gp.op = b'DELETE'
            elif line.startswith(b'new file mode '):
                gp.op = b'ADD'
                gp.setmode(int(line[-6:], 8))
            elif line.startswith(b'new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith(b'GIT binary patch'):
                gp.binary = True
    # A trailing header with no hunk section still counts.
    if gp:
        gitpatches.append(gp)

    return gitpatches
432
432
433
433
class linereader(object):
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # Pushed-back lines are served (FIFO) before reading fp again.
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if not self.buf:
            return self.fp.readline()
        return self.buf.pop(0)

    def __iter__(self):
        # Iterate until an empty read signals end of stream.
        return iter(self.readline, b'')
453
453
454
454
class abstractbackend(object):
    """Interface for reading and writing files touched by a patch.

    Concrete subclasses bind file access to a particular destination
    (filesystem, repository store, ...).
    """

    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return a (data, (islink, isexec)) tuple for the target file.

        ``data`` is None when the file is missing or deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write ``data`` to target file ``fname`` and apply ``mode``,
        an (islink, isexec) pair.

        A ``data`` of None leaves the current content untouched.  When
        the file was modified after being copied, ``copysource`` names
        the original file.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Record rejected ``lines`` for ``fname``; ``failed`` of the
        ``total`` hunks for this file did not apply.

        Optional hook: the default implementation does nothing.
        """

    def exists(self, fname):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError
488
488
489
489
class fsbackend(abstractbackend):
    """A patch backend reading from and writing to the filesystem,
    rooted at ``basedir``."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        # All file access goes through a vfs rooted at basedir.
        self.opener = vfsmod.vfs(basedir)

    def getfile(self, fname):
        # A symlink's "data" is its target, flagged (islink=True,
        # isexec=False).
        if self.opener.islink(fname):
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
        except OSError as e:
            # A missing file simply isn't executable; any other stat
            # failure is a real error.
            if e.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            # Missing/deleted file: signalled as (None, None) per the
            # abstractbackend.getfile() contract.
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # No content change requested: only update the flags.
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        # Optionally prune directories left empty by the removal.
        rmdir = self.ui.configbool(b'experimental', b'removeemptydirs')
        self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)

    def writerej(self, fname, failed, total, lines):
        # Rejected hunks are saved next to the target file as
        # "<fname>.rej".
        fname = fname + b".rej"
        self.ui.warn(
            _(b"%d out of %d hunks FAILED -- saving rejects to file %s\n")
            % (failed, total, fname)
        )
        fp = self.opener(fname, b'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        # lexists: a dangling symlink still counts as existing.
        return self.opener.lexists(fname)
540
540
541
541
class workingbackend(fsbackend):
    """A filesystem backend that additionally keeps the repository
    dirstate in sync with the files it patches."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # similarity threshold forwarded to scmutil.marktouched() in
        # close()
        self.similarity = similarity
        # files unlinked by the patch
        self.removed = set()
        # every file touched (written or removed)
        self.changed = set()
        # (copysource, destination) pairs recorded by setfile()
        self.copied = []

    def _checkknown(self, fname):
        # Refuse to touch files that exist on disk but are unknown
        # ('?') to the dirstate.
        if self.repo.dirstate[fname] == b'?' and self.exists(fname):
            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        """Flush the recorded changes into the dirstate and return the
        sorted list of files touched by the patch."""
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for f in self.removed:
                if f not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate, it was probably marked added then
                    # deleted, and should not be considered by
                    # marktouched().
                    changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)
585
585
586
586
class filestore(object):
    """Store for patched file contents.

    Content is kept in memory until more than ``maxsize`` bytes have
    accumulated, after which further files spill into a temporary
    directory on disk.  A negative ``maxsize`` keeps everything in
    memory.
    """

    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        # Default in-memory budget: 4 MiB.
        self.maxsize = 4 * (2 ** 20) if maxsize is None else maxsize
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            # Still under budget: keep the content in memory.
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            # First spill: create the backing temporary directory.
            root = pycompat.mkdtemp(prefix=b'hg-patch-')
            self.opener = vfsmod.vfs(root)
        # Avoid filename issues with these simple names
        fn = b'%d' % self.created
        self.opener.write(fn, data)
        self.created += 1
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied) for ``fname``, or a triple of
        Nones when it was never stored."""
        try:
            return self.data[fname]
        except KeyError:
            pass
        if self.opener is not None and fname in self.files:
            fn, mode, copied = self.files[fname]
            return self.opener.read(fn), mode, copied
        return None, None, None

    def close(self):
        # Remove the spill directory, if one was ever created.
        if self.opener:
            shutil.rmtree(self.opener.base)
623
623
624
624
class repobackend(abstractbackend):
    """A backend reading original file content from a context object
    and writing patched results into a filestore rather than the
    working directory."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        # context providing original file data and membership tests
        self.ctx = ctx
        # filestore receiving the patched content
        self.store = store
        self.changed = set()
        self.removed = set()
        # destination -> copysource for copied/renamed files
        self.copied = {}

    def _checkknown(self, fname):
        # Only files known to the context may be patched.
        if fname not in self.ctx:
            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            # Missing file: (None, None) per the abstractbackend
            # contract.
            return None, None
        flags = fctx.flags()
        return fctx.data(), (b'l' in flags, b'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # Flags-only change: carry the existing content forward.
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        # Removal is only recorded here; the caller acts on it later.
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        """Return the set of all files affected by the patch."""
        return self.changed | self.removed
666
666
667
667
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
# range lines of context-format diffs: "--- start[,len]" / "*** start[,len]"
contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# valid end-of-line normalization modes accepted by the patching code
eolmodes = [b'strict', b'crlf', b'lf', b'auto']
672
672
673
673
class patchfile(object):
    """State of one target file while hunks are applied to it.

    Content is loaded from ``backend`` (or from ``store`` when the file
    is a copy/rename), hunks are applied in memory by apply(), and the
    result is flushed back through the backend by close().
    """

    def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        # EOL style detected from the first line of the current content
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in (b'ADD', b'COPY', b'RENAME')
        self.remove = gp.op == b'DELETE'
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            # Copies/renames read their starting content from the store.
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith(b'\r\n'):
                    self.eol = b'\r\n'
                elif self.lines[0].endswith(b'\n'):
                    self.eol = b'\n'
                if eolmode != b'strict':
                    # Patch in LF form; writelines() restores the
                    # requested EOL style on output.
                    nlines = []
                    for l in self.lines:
                        if l.endswith(b'\r\n'):
                            l = l[:-2] + b'\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_(b"unable to find '%s' for patching\n") % self.fname)
            self.ui.warn(
                _(
                    b"(use '--prefix' to apply patch relative to the "
                    b"current directory)\n"
                )
            )

        # line content -> line numbers, built by apply() for fuzzing
        self.hash = {}
        self.dirty = 0
        # cumulative line-count delta from hunks already applied
        self.offset = 0
        # offset at which the previous hunk actually matched
        self.skew = 0
        # hunks rejected so far
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        """Join ``lines``, restore the EOL style dictated by eolmode,
        and hand the result to the backend."""
        if self.eolmode == b'auto':
            eol = self.eol
        elif self.eolmode == b'crlf':
            eol = b'\r\n'
        else:
            eol = b'\n'

        if self.eolmode != b'strict' and eol and eol != b'\n':
            rawlines = []
            for l in lines:
                if l and l.endswith(b'\n'):
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, b''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        """Report which file is being patched, at most once (warning
        level if ``warn``, note level otherwise)."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _(b"patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = [b"--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1:] != b'\n':
                    lines.append(b"\n\\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply hunk ``h`` to the in-memory lines.

        Returns 0 for a clean application, the amount of fuzz used for
        a fuzzy application, or -1 when the hunk is rejected.
        """
        if not h.complete():
            raise PatchError(
                _(b"bad hunk #%d %s (%d %d %d %d)")
                % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)
            )

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(
                    _(b"cannot create %s: destination already exists\n")
                    % self.fname
                )
            else:
                self.ui.warn(_(b"file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # Binary hunks replace the whole content; no fuzzing.
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (
            self.eolmode in (b'crlf', b'lf')
            or self.eolmode == b'auto'
            and self.eol
        ):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if self.skew == 0 and diffhelper.testhunk(old, self.lines, oldstart):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart : oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        # Try increasing fuzz levels; at each level try stripping
        # context from the top only first, then from both ends.
        for fuzzlen in pycompat.xrange(
            self.ui.configint(b"patch", b"fuzz") + 1
        ):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelper.testhunk(old, self.lines, l):
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _(
                                b"Hunk #%d succeeded at %d "
                                b"with fuzz %d "
                                b"(offset %d lines).\n"
                            )
                            self.printfile(True)
                            self.ui.warn(
                                msg % (h.number, l + 1, fuzzlen, offset)
                            )
                        else:
                            msg = _(
                                b"Hunk #%d succeeded at %d "
                                b"(offset %d lines).\n"
                            )
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_(b"Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        """Flush applied changes through the backend, write any reject
        file, and return the number of rejected hunks."""
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
905
905
906
906
class header(object):
    """The header of one file's diff within a patch.

    Holds the raw header lines plus the list of hunks parsed for the file,
    and answers questions about the header (binary?, new file?, special?).
    """

    diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
    diff_re = re.compile(b'diff -r .* (.*)$')
    allhunks_re = re.compile(b'(?:index|deleted file) ')
    pretty_re = re.compile(b'(?:new file|deleted file) ')
    special_re = re.compile(b'(?:index|deleted|copy|rename|new mode) ')
    newfile_re = re.compile(b'(?:new file|copy to|rename to)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        """Return True if any header line marks a binary diff."""
        for line in self.header:
            if line.startswith(b'index '):
                return True
        return False

    def pretty(self, fp):
        """Write a human-oriented summary of this header to ``fp``."""
        for line in self.header:
            if line.startswith(b'index '):
                fp.write(_(b'this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(line):
                fp.write(line)
                if self.binary():
                    fp.write(_(b'this is a binary file\n'))
                break
            if line.startswith(b'---'):
                # End of header proper: summarize the hunks instead of
                # dumping the ---/+++ lines.
                fp.write(
                    _(b'%d hunks, %d lines changed\n')
                    % (
                        len(self.hunks),
                        sum(max(h.added, h.removed) for h in self.hunks),
                    )
                )
                break
            fp.write(line)

    def write(self, fp):
        """Write the raw header lines to ``fp``."""
        fp.write(b''.join(self.header))

    def allhunks(self):
        """True when the file must be taken whole (binary or deleted)."""
        for line in self.header:
            if self.allhunks_re.match(line):
                return True
        return False

    def files(self):
        """Return the file name(s) this header refers to."""
        m = self.diffgit_re.match(self.header[0])
        if not m:
            # Plain "diff -r" header: a single file name.
            return self.diff_re.match(self.header[0]).groups()
        fromfile, tofile = m.groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        """Return the destination file name."""
        return self.files()[-1]

    def __repr__(self):
        names = pycompat.rapply(pycompat.fsdecode, self.files())
        return '<header %s>' % ' '.join(names)

    def isnewfile(self):
        """True for new/copied/renamed destinations."""
        for line in self.header:
            if self.newfile_re.match(line):
                return True
        return False

    def special(self):
        """True when the change can only be taken all-or-nothing.

        Special files are shown only at the header level and not at the hunk
        level; for example, a deleted file is special: the user takes the
        deletion or leaves it, but cannot take part of it.  Newly added files
        are special only while empty — once they have content, the user may
        select parts of it.
        """
        nocontent = len(self.header) == 2
        if self.isnewfile() and nocontent:
            return True
        return any(self.special_re.match(h) for h in self.header)
986
986
987
987
class recordhunk(object):
    """One hunk of a patch, as selected by the interactive record UI.

    NOTE(review): there is a separate ``hunk`` class used by the parser;
    merging the two has been a long-standing idea.
    """

    def __init__(
        self,
        header,
        fromline,
        toline,
        proc,
        before,
        hunk,
        after,
        maxcontext=None,
    ):
        def trimcontext(lines, reverse=False):
            # Limit context to maxcontext lines; report how many were
            # dropped so line numbers can be shifted accordingly.
            if maxcontext is None:
                return 0, lines
            excess = len(lines) - maxcontext
            if excess <= 0:
                return 0, lines
            if reverse:
                return excess, lines[excess:]
            return excess, lines[:maxcontext]

        self.header = header
        trimmed, self.before = trimcontext(before, True)
        # Leading context was trimmed, so both start lines shift forward.
        self.fromline = fromline + trimmed
        self.toline = toline + trimmed
        _unused, self.after = trimcontext(after, False)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False
        return (
            v.hunk == self.hunk
            and v.proc == self.proc
            and self.fromline == v.fromline
            and self.header.files() == v.header.files()
        )

    def __hash__(self):
        key = (
            tuple(self.hunk),
            tuple(self.header.files()),
            self.fromline,
            self.proc,
        )
        return hash(key)

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = rem = 0
        for line in hunk:
            if line.startswith(b'+'):
                add += 1
            elif line.startswith(b'-'):
                rem += 1
        return add, rem

    def reversehunk(self):
        """Return another recordhunk which is the reverse of this hunk.

        If this hunk is diff(A, B), the returned hunk is diff(B, A).  To do
        that, swap fromline/toline and the +/- signs while keeping all
        other fields unchanged.
        """
        flip = {b'+': b'-', b'-': b'+', b'\\': b'\\'}
        reversed_lines = [flip[l[0:1]] + l[1:] for l in self.hunk]
        return recordhunk(
            self.header,
            self.toline,
            self.fromline,
            self.proc,
            self.before,
            reversed_lines,
            self.after,
        )

    def write(self, fp):
        """Write this hunk in unified-diff format to ``fp``."""
        context = len(self.before) + len(self.after)
        # A trailing "\ No newline" marker is not a real context line.
        if self.after and self.after[-1] == b'\\ No newline at end of file\n':
            context -= 1
        fromlen = context + self.removed
        tolen = context + self.added
        fp.write(
            b'@@ -%d,%d +%d,%d @@%s\n'
            % (
                self.fromline,
                fromlen,
                self.toline,
                tolen,
                self.proc and (b' ' + self.proc),
            )
        )
        fp.write(b''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    @encoding.strmethod
    def __repr__(self):
        return b'<hunk %r@%d>' % (self.filename(), self.fromline)
1096
1096
1097
1097
def getmessages():
    """Return the prompt and help message table for interactive filtering.

    The table maps a context key (``b'multiple'``, ``b'single'``,
    ``b'help'``) to a dict keyed by operation (``b'apply'``, ``b'discard'``,
    ``b'keep'``, ``b'record'``).
    """
    multiple = {
        b'apply': _(b"apply change %d/%d to '%s'?"),
        b'discard': _(b"discard change %d/%d to '%s'?"),
        b'keep': _(b"keep change %d/%d to '%s'?"),
        b'record': _(b"record change %d/%d to '%s'?"),
    }
    single = {
        b'apply': _(b"apply this change to '%s'?"),
        b'discard': _(b"discard this change to '%s'?"),
        b'keep': _(b"keep this change to '%s'?"),
        b'record': _(b"record this change to '%s'?"),
    }
    # promptchoice-style strings: choices are separated by '$$', the
    # accelerator key is marked with '&'.
    helptable = {
        b'apply': _(
            b'[Ynesfdaq?]'
            b'$$ &Yes, apply this change'
            b'$$ &No, skip this change'
            b'$$ &Edit this change manually'
            b'$$ &Skip remaining changes to this file'
            b'$$ Apply remaining changes to this &file'
            b'$$ &Done, skip remaining changes and files'
            b'$$ Apply &all changes to all remaining files'
            b'$$ &Quit, applying no changes'
            b'$$ &? (display help)'
        ),
        b'discard': _(
            b'[Ynesfdaq?]'
            b'$$ &Yes, discard this change'
            b'$$ &No, skip this change'
            b'$$ &Edit this change manually'
            b'$$ &Skip remaining changes to this file'
            b'$$ Discard remaining changes to this &file'
            b'$$ &Done, skip remaining changes and files'
            b'$$ Discard &all changes to all remaining files'
            b'$$ &Quit, discarding no changes'
            b'$$ &? (display help)'
        ),
        b'keep': _(
            b'[Ynesfdaq?]'
            b'$$ &Yes, keep this change'
            b'$$ &No, skip this change'
            b'$$ &Edit this change manually'
            b'$$ &Skip remaining changes to this file'
            b'$$ Keep remaining changes to this &file'
            b'$$ &Done, skip remaining changes and files'
            b'$$ Keep &all changes to all remaining files'
            b'$$ &Quit, keeping all changes'
            b'$$ &? (display help)'
        ),
        b'record': _(
            b'[Ynesfdaq?]'
            b'$$ &Yes, record this change'
            b'$$ &No, skip this change'
            b'$$ &Edit this change manually'
            b'$$ &Skip remaining changes to this file'
            b'$$ Record remaining changes to this &file'
            b'$$ &Done, skip remaining changes and files'
            b'$$ Record &all changes to all remaining files'
            b'$$ &Quit, recording no changes'
            b'$$ &? (display help)'
        ),
    }
    return {
        b'multiple': multiple,
        b'single': single,
        b'help': helptable,
    }
1163
1163
1164
1164
def filterpatch(ui, headers, match, operation=None):
    """Interactively filter patch chunks into applied-only chunks.

    ``ui`` is prompted per header and per hunk; ``headers`` is the list of
    parsed file headers (each carrying its hunks); ``match`` selects files
    that are taken without prompting; ``operation`` picks the message set
    (defaults to b'record').

    Returns a 2-tuple: a flat list of the selected headers and hunks, and
    an (always empty here) dict.
    """
    messages = getmessages()

    if operation is None:
        operation = b'record'

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # A previous "all"/"done" or per-file answer short-circuits the
        # prompt entirely.
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = messages[b'help'][operation]
            # IMPORTANT: keep the last line of this prompt short (<40 english
            # chars is a good target) because of issue6158.
            r = ui.promptchoice(b"%s\n(enter ? for help) %s" % (query, resps))
            ui.write(b"\n")
            if r == 8:  # ? — print one line per available choice, then re-ask
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                continue
            elif r == 0:  # yes
                ret = True
            elif r == 1:  # no
                ret = False
            elif r == 2:  # Edit patch
                # Whole-file prompts (chunk is None) and binary diffs
                # cannot be edited by hand.
                if chunk is None:
                    ui.write(_(b'cannot edit patch for whole file'))
                    ui.write(b"\n")
                    continue
                if chunk.header.binary():
                    ui.write(_(b'cannot edit patch for binary file'))
                    ui.write(b"\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # https://mercurial-scm.org/wiki/RecordExtension)
                phelp = b'---' + _(
                    """
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
"""
                )
                (patchfd, patchfn) = pycompat.mkstemp(
                    prefix=b"hg-editor-", suffix=b".diff"
                )
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = util.nativeeolwriter(os.fdopen(patchfd, 'wb'))
                    chunk.header.write(f)
                    chunk.write(f)
                    # Append the help text as '#' comment lines so they are
                    # stripped again when the edited patch is read back.
                    f.write(
                        b''.join(
                            [b'# ' + i + b'\n' for i in phelp.splitlines()]
                        )
                    )
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system(
                        b"%s \"%s\"" % (editor, patchfn),
                        environ={b'HGUSER': ui.username()},
                        blockedtag=b'filterpatch',
                    )
                    if ret != 0:
                        ui.warn(_(b"editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn, 'rb')
                    ncpatchfp = stringio()
                    for line in util.iterfile(patchfp):
                        line = util.fromnativeeol(line)
                        if not line.startswith(b'#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3:  # Skip
                ret = skipfile = False
            elif r == 4:  # file (Record remaining)
                ret = skipfile = True
            elif r == 5:  # done, skip remaining
                ret = skipall = False
            elif r == 6:  # all
                ret = skipall = True
            elif r == 7:  # quit
                raise error.Abort(_(b'user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}  # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        fixoffset = 0  # cumulative line shift from rejected earlier hunks
        hdr = b''.join(h.header)
        # A header can appear more than once (e.g. same file listed twice);
        # only process the first occurrence.
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
        files = h.files()
        msg = _(b'examine changes to %s?') % _(b' and ').join(
            b"'%s'" % f for f in files
        )
        # Files named explicitly on the command line are taken without
        # asking; np is unused in that branch.
        if all(match.exact(f) for f in files):
            r, skipall, np = True, None, None
        else:
            r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
        if not r:
            continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # Binary/deleted files: take every hunk, no per-hunk prompt.
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = messages[b'single'][operation] % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = messages[b'multiple'][operation] % (
                    idx,
                    total,
                    chunk.filename(),
                )
            r, skipfile, skipall, newpatches = prompt(
                skipfile, skipall, msg, chunk
            )
            if r:
                if fixoffset:
                    # Don't mutate the shared chunk; shift a copy.
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                # The user edited the hunk: splice in the re-parsed hunks.
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                # Hunk rejected: later hunks in this file shift by its delta.
                fixoffset += chunk.removed - chunk.added
    # Flatten, keeping only files with at least one selected hunk or a
    # special (all-or-nothing) header.
    return (
        sum(
            [
                h
                for h in pycompat.itervalues(applied)
                if h[0].special() or len(h) > 1
            ],
            [],
        ),
        {},
    )
1345
1345
1346
1346
1347 class hunk(object):
1347 class hunk(object):
1348 def __init__(self, desc, num, lr, context):
1348 def __init__(self, desc, num, lr, context):
1349 self.number = num
1349 self.number = num
1350 self.desc = desc
1350 self.desc = desc
1351 self.hunk = [desc]
1351 self.hunk = [desc]
1352 self.a = []
1352 self.a = []
1353 self.b = []
1353 self.b = []
1354 self.starta = self.lena = None
1354 self.starta = self.lena = None
1355 self.startb = self.lenb = None
1355 self.startb = self.lenb = None
1356 if lr is not None:
1356 if lr is not None:
1357 if context:
1357 if context:
1358 self.read_context_hunk(lr)
1358 self.read_context_hunk(lr)
1359 else:
1359 else:
1360 self.read_unified_hunk(lr)
1360 self.read_unified_hunk(lr)
1361
1361
1362 def getnormalized(self):
1362 def getnormalized(self):
1363 """Return a copy with line endings normalized to LF."""
1363 """Return a copy with line endings normalized to LF."""
1364
1364
1365 def normalize(lines):
1365 def normalize(lines):
1366 nlines = []
1366 nlines = []
1367 for line in lines:
1367 for line in lines:
1368 if line.endswith(b'\r\n'):
1368 if line.endswith(b'\r\n'):
1369 line = line[:-2] + b'\n'
1369 line = line[:-2] + b'\n'
1370 nlines.append(line)
1370 nlines.append(line)
1371 return nlines
1371 return nlines
1372
1372
1373 # Dummy object, it is rebuilt manually
1373 # Dummy object, it is rebuilt manually
1374 nh = hunk(self.desc, self.number, None, None)
1374 nh = hunk(self.desc, self.number, None, None)
1375 nh.number = self.number
1375 nh.number = self.number
1376 nh.desc = self.desc
1376 nh.desc = self.desc
1377 nh.hunk = self.hunk
1377 nh.hunk = self.hunk
1378 nh.a = normalize(self.a)
1378 nh.a = normalize(self.a)
1379 nh.b = normalize(self.b)
1379 nh.b = normalize(self.b)
1380 nh.starta = self.starta
1380 nh.starta = self.starta
1381 nh.startb = self.startb
1381 nh.startb = self.startb
1382 nh.lena = self.lena
1382 nh.lena = self.lena
1383 nh.lenb = self.lenb
1383 nh.lenb = self.lenb
1384 return nh
1384 return nh
1385
1385
    def read_unified_hunk(self, lr):
        """Parse a unified-diff hunk body from line reader ``lr``.

        Fills in starta/lena/startb/lenb from the "@@ -s[,n] +s[,n] @@"
        descriptor and populates self.a/self.b/self.hunk via
        diffhelper.addlines.  Raises PatchError when the descriptor or the
        body is malformed.
        """
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_(b"bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # An omitted length in the descriptor means a single line.
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        try:
            diffhelper.addlines(
                lr, self.hunk, self.lena, self.lenb, self.a, self.b
            )
        except error.ParseError as e:
            raise PatchError(_(b"bad hunk #%d: %s") % (self.number, e))
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)
1416
1416
1417 def read_context_hunk(self, lr):
1417 def read_context_hunk(self, lr):
1418 self.desc = lr.readline()
1418 self.desc = lr.readline()
1419 m = contextdesc.match(self.desc)
1419 m = contextdesc.match(self.desc)
1420 if not m:
1420 if not m:
1421 raise PatchError(_(b"bad hunk #%d") % self.number)
1421 raise PatchError(_(b"bad hunk #%d") % self.number)
1422 self.starta, aend = m.groups()
1422 self.starta, aend = m.groups()
1423 self.starta = int(self.starta)
1423 self.starta = int(self.starta)
1424 if aend is None:
1424 if aend is None:
1425 aend = self.starta
1425 aend = self.starta
1426 self.lena = int(aend) - self.starta
1426 self.lena = int(aend) - self.starta
1427 if self.starta:
1427 if self.starta:
1428 self.lena += 1
1428 self.lena += 1
1429 for x in pycompat.xrange(self.lena):
1429 for x in pycompat.xrange(self.lena):
1430 l = lr.readline()
1430 l = lr.readline()
1431 if l.startswith(b'---'):
1431 if l.startswith(b'---'):
1432 # lines addition, old block is empty
1432 # lines addition, old block is empty
1433 lr.push(l)
1433 lr.push(l)
1434 break
1434 break
1435 s = l[2:]
1435 s = l[2:]
1436 if l.startswith(b'- ') or l.startswith(b'! '):
1436 if l.startswith(b'- ') or l.startswith(b'! '):
1437 u = b'-' + s
1437 u = b'-' + s
1438 elif l.startswith(b' '):
1438 elif l.startswith(b' '):
1439 u = b' ' + s
1439 u = b' ' + s
1440 else:
1440 else:
1441 raise PatchError(
1441 raise PatchError(
1442 _(b"bad hunk #%d old text line %d") % (self.number, x)
1442 _(b"bad hunk #%d old text line %d") % (self.number, x)
1443 )
1443 )
1444 self.a.append(u)
1444 self.a.append(u)
1445 self.hunk.append(u)
1445 self.hunk.append(u)
1446
1446
1447 l = lr.readline()
1447 l = lr.readline()
1448 if l.startswith(br'\ '):
1448 if l.startswith(br'\ '):
1449 s = self.a[-1][:-1]
1449 s = self.a[-1][:-1]
1450 self.a[-1] = s
1450 self.a[-1] = s
1451 self.hunk[-1] = s
1451 self.hunk[-1] = s
1452 l = lr.readline()
1452 l = lr.readline()
1453 m = contextdesc.match(l)
1453 m = contextdesc.match(l)
1454 if not m:
1454 if not m:
1455 raise PatchError(_(b"bad hunk #%d") % self.number)
1455 raise PatchError(_(b"bad hunk #%d") % self.number)
1456 self.startb, bend = m.groups()
1456 self.startb, bend = m.groups()
1457 self.startb = int(self.startb)
1457 self.startb = int(self.startb)
1458 if bend is None:
1458 if bend is None:
1459 bend = self.startb
1459 bend = self.startb
1460 self.lenb = int(bend) - self.startb
1460 self.lenb = int(bend) - self.startb
1461 if self.startb:
1461 if self.startb:
1462 self.lenb += 1
1462 self.lenb += 1
1463 hunki = 1
1463 hunki = 1
1464 for x in pycompat.xrange(self.lenb):
1464 for x in pycompat.xrange(self.lenb):
1465 l = lr.readline()
1465 l = lr.readline()
1466 if l.startswith(br'\ '):
1466 if l.startswith(br'\ '):
1467 # XXX: the only way to hit this is with an invalid line range.
1467 # XXX: the only way to hit this is with an invalid line range.
1468 # The no-eol marker is not counted in the line range, but I
1468 # The no-eol marker is not counted in the line range, but I
1469 # guess there are diff(1) out there which behave differently.
1469 # guess there are diff(1) out there which behave differently.
1470 s = self.b[-1][:-1]
1470 s = self.b[-1][:-1]
1471 self.b[-1] = s
1471 self.b[-1] = s
1472 self.hunk[hunki - 1] = s
1472 self.hunk[hunki - 1] = s
1473 continue
1473 continue
1474 if not l:
1474 if not l:
1475 # line deletions, new block is empty and we hit EOF
1475 # line deletions, new block is empty and we hit EOF
1476 lr.push(l)
1476 lr.push(l)
1477 break
1477 break
1478 s = l[2:]
1478 s = l[2:]
1479 if l.startswith(b'+ ') or l.startswith(b'! '):
1479 if l.startswith(b'+ ') or l.startswith(b'! '):
1480 u = b'+' + s
1480 u = b'+' + s
1481 elif l.startswith(b' '):
1481 elif l.startswith(b' '):
1482 u = b' ' + s
1482 u = b' ' + s
1483 elif len(self.b) == 0:
1483 elif len(self.b) == 0:
1484 # line deletions, new block is empty
1484 # line deletions, new block is empty
1485 lr.push(l)
1485 lr.push(l)
1486 break
1486 break
1487 else:
1487 else:
1488 raise PatchError(
1488 raise PatchError(
1489 _(b"bad hunk #%d old text line %d") % (self.number, x)
1489 _(b"bad hunk #%d old text line %d") % (self.number, x)
1490 )
1490 )
1491 self.b.append(s)
1491 self.b.append(s)
1492 while True:
1492 while True:
1493 if hunki >= len(self.hunk):
1493 if hunki >= len(self.hunk):
1494 h = b""
1494 h = b""
1495 else:
1495 else:
1496 h = self.hunk[hunki]
1496 h = self.hunk[hunki]
1497 hunki += 1
1497 hunki += 1
1498 if h == u:
1498 if h == u:
1499 break
1499 break
1500 elif h.startswith(b'-'):
1500 elif h.startswith(b'-'):
1501 continue
1501 continue
1502 else:
1502 else:
1503 self.hunk.insert(hunki - 1, u)
1503 self.hunk.insert(hunki - 1, u)
1504 break
1504 break
1505
1505
1506 if not self.a:
1506 if not self.a:
1507 # this happens when lines were only added to the hunk
1507 # this happens when lines were only added to the hunk
1508 for x in self.hunk:
1508 for x in self.hunk:
1509 if x.startswith(b'-') or x.startswith(b' '):
1509 if x.startswith(b'-') or x.startswith(b' '):
1510 self.a.append(x)
1510 self.a.append(x)
1511 if not self.b:
1511 if not self.b:
1512 # this happens when lines were only deleted from the hunk
1512 # this happens when lines were only deleted from the hunk
1513 for x in self.hunk:
1513 for x in self.hunk:
1514 if x.startswith(b'+') or x.startswith(b' '):
1514 if x.startswith(b'+') or x.startswith(b' '):
1515 self.b.append(x[1:])
1515 self.b.append(x[1:])
1516 # @@ -start,len +start,len @@
1516 # @@ -start,len +start,len @@
1517 self.desc = b"@@ -%d,%d +%d,%d @@\n" % (
1517 self.desc = b"@@ -%d,%d +%d,%d @@\n" % (
1518 self.starta,
1518 self.starta,
1519 self.lena,
1519 self.lena,
1520 self.startb,
1520 self.startb,
1521 self.lenb,
1521 self.lenb,
1522 )
1522 )
1523 self.hunk[0] = self.desc
1523 self.hunk[0] = self.desc
1524 self._fixnewline(lr)
1524 self._fixnewline(lr)
1525
1525
1526 def _fixnewline(self, lr):
1526 def _fixnewline(self, lr):
1527 l = lr.readline()
1527 l = lr.readline()
1528 if l.startswith(br'\ '):
1528 if l.startswith(br'\ '):
1529 diffhelper.fixnewline(self.hunk, self.a, self.b)
1529 diffhelper.fixnewline(self.hunk, self.a, self.b)
1530 else:
1530 else:
1531 lr.push(l)
1531 lr.push(l)
1532
1532
1533 def complete(self):
1533 def complete(self):
1534 return len(self.a) == self.lena and len(self.b) == self.lenb
1534 return len(self.a) == self.lena and len(self.b) == self.lenb
1535
1535
1536 def _fuzzit(self, old, new, fuzz, toponly):
1536 def _fuzzit(self, old, new, fuzz, toponly):
1537 # this removes context lines from the top and bottom of list 'l'. It
1537 # this removes context lines from the top and bottom of list 'l'. It
1538 # checks the hunk to make sure only context lines are removed, and then
1538 # checks the hunk to make sure only context lines are removed, and then
1539 # returns a new shortened list of lines.
1539 # returns a new shortened list of lines.
1540 fuzz = min(fuzz, len(old))
1540 fuzz = min(fuzz, len(old))
1541 if fuzz:
1541 if fuzz:
1542 top = 0
1542 top = 0
1543 bot = 0
1543 bot = 0
1544 hlen = len(self.hunk)
1544 hlen = len(self.hunk)
1545 for x in pycompat.xrange(hlen - 1):
1545 for x in pycompat.xrange(hlen - 1):
1546 # the hunk starts with the @@ line, so use x+1
1546 # the hunk starts with the @@ line, so use x+1
1547 if self.hunk[x + 1].startswith(b' '):
1547 if self.hunk[x + 1].startswith(b' '):
1548 top += 1
1548 top += 1
1549 else:
1549 else:
1550 break
1550 break
1551 if not toponly:
1551 if not toponly:
1552 for x in pycompat.xrange(hlen - 1):
1552 for x in pycompat.xrange(hlen - 1):
1553 if self.hunk[hlen - bot - 1].startswith(b' '):
1553 if self.hunk[hlen - bot - 1].startswith(b' '):
1554 bot += 1
1554 bot += 1
1555 else:
1555 else:
1556 break
1556 break
1557
1557
1558 bot = min(fuzz, bot)
1558 bot = min(fuzz, bot)
1559 top = min(fuzz, top)
1559 top = min(fuzz, top)
1560 return old[top : len(old) - bot], new[top : len(new) - bot], top
1560 return old[top : len(old) - bot], new[top : len(new) - bot], top
1561 return old, new, 0
1561 return old, new, 0
1562
1562
1563 def fuzzit(self, fuzz, toponly):
1563 def fuzzit(self, fuzz, toponly):
1564 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1564 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1565 oldstart = self.starta + top
1565 oldstart = self.starta + top
1566 newstart = self.startb + top
1566 newstart = self.startb + top
1567 # zero length hunk ranges already have their start decremented
1567 # zero length hunk ranges already have their start decremented
1568 if self.lena and oldstart > 0:
1568 if self.lena and oldstart > 0:
1569 oldstart -= 1
1569 oldstart -= 1
1570 if self.lenb and newstart > 0:
1570 if self.lenb and newstart > 0:
1571 newstart -= 1
1571 newstart -= 1
1572 return old, oldstart, new, newstart
1572 return old, oldstart, new, newstart
1573
1573
1574
1574
1575 class binhunk(object):
1575 class binhunk(object):
1576 """A binary patch file."""
1576 """A binary patch file."""
1577
1577
1578 def __init__(self, lr, fname):
1578 def __init__(self, lr, fname):
1579 self.text = None
1579 self.text = None
1580 self.delta = False
1580 self.delta = False
1581 self.hunk = [b'GIT binary patch\n']
1581 self.hunk = [b'GIT binary patch\n']
1582 self._fname = fname
1582 self._fname = fname
1583 self._read(lr)
1583 self._read(lr)
1584
1584
1585 def complete(self):
1585 def complete(self):
1586 return self.text is not None
1586 return self.text is not None
1587
1587
1588 def new(self, lines):
1588 def new(self, lines):
1589 if self.delta:
1589 if self.delta:
1590 return [applybindelta(self.text, b''.join(lines))]
1590 return [applybindelta(self.text, b''.join(lines))]
1591 return [self.text]
1591 return [self.text]
1592
1592
1593 def _read(self, lr):
1593 def _read(self, lr):
1594 def getline(lr, hunk):
1594 def getline(lr, hunk):
1595 l = lr.readline()
1595 l = lr.readline()
1596 hunk.append(l)
1596 hunk.append(l)
1597 return l.rstrip(b'\r\n')
1597 return l.rstrip(b'\r\n')
1598
1598
1599 while True:
1599 while True:
1600 line = getline(lr, self.hunk)
1600 line = getline(lr, self.hunk)
1601 if not line:
1601 if not line:
1602 raise PatchError(
1602 raise PatchError(
1603 _(b'could not extract "%s" binary data') % self._fname
1603 _(b'could not extract "%s" binary data') % self._fname
1604 )
1604 )
1605 if line.startswith(b'literal '):
1605 if line.startswith(b'literal '):
1606 size = int(line[8:].rstrip())
1606 size = int(line[8:].rstrip())
1607 break
1607 break
1608 if line.startswith(b'delta '):
1608 if line.startswith(b'delta '):
1609 size = int(line[6:].rstrip())
1609 size = int(line[6:].rstrip())
1610 self.delta = True
1610 self.delta = True
1611 break
1611 break
1612 dec = []
1612 dec = []
1613 line = getline(lr, self.hunk)
1613 line = getline(lr, self.hunk)
1614 while len(line) > 1:
1614 while len(line) > 1:
1615 l = line[0:1]
1615 l = line[0:1]
1616 if l <= b'Z' and l >= b'A':
1616 if l <= b'Z' and l >= b'A':
1617 l = ord(l) - ord(b'A') + 1
1617 l = ord(l) - ord(b'A') + 1
1618 else:
1618 else:
1619 l = ord(l) - ord(b'a') + 27
1619 l = ord(l) - ord(b'a') + 27
1620 try:
1620 try:
1621 dec.append(util.b85decode(line[1:])[:l])
1621 dec.append(util.b85decode(line[1:])[:l])
1622 except ValueError as e:
1622 except ValueError as e:
1623 raise PatchError(
1623 raise PatchError(
1624 _(b'could not decode "%s" binary patch: %s')
1624 _(b'could not decode "%s" binary patch: %s')
1625 % (self._fname, stringutil.forcebytestr(e))
1625 % (self._fname, stringutil.forcebytestr(e))
1626 )
1626 )
1627 line = getline(lr, self.hunk)
1627 line = getline(lr, self.hunk)
1628 text = zlib.decompress(b''.join(dec))
1628 text = zlib.decompress(b''.join(dec))
1629 if len(text) != size:
1629 if len(text) != size:
1630 raise PatchError(
1630 raise PatchError(
1631 _(b'"%s" length is %d bytes, should be %d')
1631 _(b'"%s" length is %d bytes, should be %d')
1632 % (self._fname, len(text), size)
1632 % (self._fname, len(text), size)
1633 )
1633 )
1634 self.text = text
1634 self.text = text
1635
1635
1636
1636
1637 def parsefilename(str):
1637 def parsefilename(str):
1638 # --- filename \t|space stuff
1638 # --- filename \t|space stuff
1639 s = str[4:].rstrip(b'\r\n')
1639 s = str[4:].rstrip(b'\r\n')
1640 i = s.find(b'\t')
1640 i = s.find(b'\t')
1641 if i < 0:
1641 if i < 0:
1642 i = s.find(b' ')
1642 i = s.find(b' ')
1643 if i < 0:
1643 if i < 0:
1644 return s
1644 return s
1645 return s[:i]
1645 return s[:i]
1646
1646
1647
1647
1648 def reversehunks(hunks):
1648 def reversehunks(hunks):
1649 '''reverse the signs in the hunks given as argument
1649 '''reverse the signs in the hunks given as argument
1650
1650
1651 This function operates on hunks coming out of patch.filterpatch, that is
1651 This function operates on hunks coming out of patch.filterpatch, that is
1652 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1652 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1653
1653
1654 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1654 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1655 ... --- a/folder1/g
1655 ... --- a/folder1/g
1656 ... +++ b/folder1/g
1656 ... +++ b/folder1/g
1657 ... @@ -1,7 +1,7 @@
1657 ... @@ -1,7 +1,7 @@
1658 ... +firstline
1658 ... +firstline
1659 ... c
1659 ... c
1660 ... 1
1660 ... 1
1661 ... 2
1661 ... 2
1662 ... + 3
1662 ... + 3
1663 ... -4
1663 ... -4
1664 ... 5
1664 ... 5
1665 ... d
1665 ... d
1666 ... +lastline"""
1666 ... +lastline"""
1667 >>> hunks = parsepatch([rawpatch])
1667 >>> hunks = parsepatch([rawpatch])
1668 >>> hunkscomingfromfilterpatch = []
1668 >>> hunkscomingfromfilterpatch = []
1669 >>> for h in hunks:
1669 >>> for h in hunks:
1670 ... hunkscomingfromfilterpatch.append(h)
1670 ... hunkscomingfromfilterpatch.append(h)
1671 ... hunkscomingfromfilterpatch.extend(h.hunks)
1671 ... hunkscomingfromfilterpatch.extend(h.hunks)
1672
1672
1673 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1673 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1674 >>> from . import util
1674 >>> from . import util
1675 >>> fp = util.stringio()
1675 >>> fp = util.stringio()
1676 >>> for c in reversedhunks:
1676 >>> for c in reversedhunks:
1677 ... c.write(fp)
1677 ... c.write(fp)
1678 >>> fp.seek(0) or None
1678 >>> fp.seek(0) or None
1679 >>> reversedpatch = fp.read()
1679 >>> reversedpatch = fp.read()
1680 >>> print(pycompat.sysstr(reversedpatch))
1680 >>> print(pycompat.sysstr(reversedpatch))
1681 diff --git a/folder1/g b/folder1/g
1681 diff --git a/folder1/g b/folder1/g
1682 --- a/folder1/g
1682 --- a/folder1/g
1683 +++ b/folder1/g
1683 +++ b/folder1/g
1684 @@ -1,4 +1,3 @@
1684 @@ -1,4 +1,3 @@
1685 -firstline
1685 -firstline
1686 c
1686 c
1687 1
1687 1
1688 2
1688 2
1689 @@ -2,6 +1,6 @@
1689 @@ -2,6 +1,6 @@
1690 c
1690 c
1691 1
1691 1
1692 2
1692 2
1693 - 3
1693 - 3
1694 +4
1694 +4
1695 5
1695 5
1696 d
1696 d
1697 @@ -6,3 +5,2 @@
1697 @@ -6,3 +5,2 @@
1698 5
1698 5
1699 d
1699 d
1700 -lastline
1700 -lastline
1701
1701
1702 '''
1702 '''
1703
1703
1704 newhunks = []
1704 newhunks = []
1705 for c in hunks:
1705 for c in hunks:
1706 if util.safehasattr(c, b'reversehunk'):
1706 if util.safehasattr(c, b'reversehunk'):
1707 c = c.reversehunk()
1707 c = c.reversehunk()
1708 newhunks.append(c)
1708 newhunks.append(c)
1709 return newhunks
1709 return newhunks
1710
1710
1711
1711
1712 def parsepatch(originalchunks, maxcontext=None):
1712 def parsepatch(originalchunks, maxcontext=None):
1713 """patch -> [] of headers -> [] of hunks
1713 """patch -> [] of headers -> [] of hunks
1714
1714
1715 If maxcontext is not None, trim context lines if necessary.
1715 If maxcontext is not None, trim context lines if necessary.
1716
1716
1717 >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
1717 >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
1718 ... --- a/folder1/g
1718 ... --- a/folder1/g
1719 ... +++ b/folder1/g
1719 ... +++ b/folder1/g
1720 ... @@ -1,8 +1,10 @@
1720 ... @@ -1,8 +1,10 @@
1721 ... 1
1721 ... 1
1722 ... 2
1722 ... 2
1723 ... -3
1723 ... -3
1724 ... 4
1724 ... 4
1725 ... 5
1725 ... 5
1726 ... 6
1726 ... 6
1727 ... +6.1
1727 ... +6.1
1728 ... +6.2
1728 ... +6.2
1729 ... 7
1729 ... 7
1730 ... 8
1730 ... 8
1731 ... +9'''
1731 ... +9'''
1732 >>> out = util.stringio()
1732 >>> out = util.stringio()
1733 >>> headers = parsepatch([rawpatch], maxcontext=1)
1733 >>> headers = parsepatch([rawpatch], maxcontext=1)
1734 >>> for header in headers:
1734 >>> for header in headers:
1735 ... header.write(out)
1735 ... header.write(out)
1736 ... for hunk in header.hunks:
1736 ... for hunk in header.hunks:
1737 ... hunk.write(out)
1737 ... hunk.write(out)
1738 >>> print(pycompat.sysstr(out.getvalue()))
1738 >>> print(pycompat.sysstr(out.getvalue()))
1739 diff --git a/folder1/g b/folder1/g
1739 diff --git a/folder1/g b/folder1/g
1740 --- a/folder1/g
1740 --- a/folder1/g
1741 +++ b/folder1/g
1741 +++ b/folder1/g
1742 @@ -2,3 +2,2 @@
1742 @@ -2,3 +2,2 @@
1743 2
1743 2
1744 -3
1744 -3
1745 4
1745 4
1746 @@ -6,2 +5,4 @@
1746 @@ -6,2 +5,4 @@
1747 6
1747 6
1748 +6.1
1748 +6.1
1749 +6.2
1749 +6.2
1750 7
1750 7
1751 @@ -8,1 +9,2 @@
1751 @@ -8,1 +9,2 @@
1752 8
1752 8
1753 +9
1753 +9
1754 """
1754 """
1755
1755
1756 class parser(object):
1756 class parser(object):
1757 """patch parsing state machine"""
1757 """patch parsing state machine"""
1758
1758
1759 def __init__(self):
1759 def __init__(self):
1760 self.fromline = 0
1760 self.fromline = 0
1761 self.toline = 0
1761 self.toline = 0
1762 self.proc = b''
1762 self.proc = b''
1763 self.header = None
1763 self.header = None
1764 self.context = []
1764 self.context = []
1765 self.before = []
1765 self.before = []
1766 self.hunk = []
1766 self.hunk = []
1767 self.headers = []
1767 self.headers = []
1768
1768
1769 def addrange(self, limits):
1769 def addrange(self, limits):
1770 self.addcontext([])
1770 self.addcontext([])
1771 fromstart, fromend, tostart, toend, proc = limits
1771 fromstart, fromend, tostart, toend, proc = limits
1772 self.fromline = int(fromstart)
1772 self.fromline = int(fromstart)
1773 self.toline = int(tostart)
1773 self.toline = int(tostart)
1774 self.proc = proc
1774 self.proc = proc
1775
1775
1776 def addcontext(self, context):
1776 def addcontext(self, context):
1777 if self.hunk:
1777 if self.hunk:
1778 h = recordhunk(
1778 h = recordhunk(
1779 self.header,
1779 self.header,
1780 self.fromline,
1780 self.fromline,
1781 self.toline,
1781 self.toline,
1782 self.proc,
1782 self.proc,
1783 self.before,
1783 self.before,
1784 self.hunk,
1784 self.hunk,
1785 context,
1785 context,
1786 maxcontext,
1786 maxcontext,
1787 )
1787 )
1788 self.header.hunks.append(h)
1788 self.header.hunks.append(h)
1789 self.fromline += len(self.before) + h.removed
1789 self.fromline += len(self.before) + h.removed
1790 self.toline += len(self.before) + h.added
1790 self.toline += len(self.before) + h.added
1791 self.before = []
1791 self.before = []
1792 self.hunk = []
1792 self.hunk = []
1793 self.context = context
1793 self.context = context
1794
1794
1795 def addhunk(self, hunk):
1795 def addhunk(self, hunk):
1796 if self.context:
1796 if self.context:
1797 self.before = self.context
1797 self.before = self.context
1798 self.context = []
1798 self.context = []
1799 if self.hunk:
1799 if self.hunk:
1800 self.addcontext([])
1800 self.addcontext([])
1801 self.hunk = hunk
1801 self.hunk = hunk
1802
1802
1803 def newfile(self, hdr):
1803 def newfile(self, hdr):
1804 self.addcontext([])
1804 self.addcontext([])
1805 h = header(hdr)
1805 h = header(hdr)
1806 self.headers.append(h)
1806 self.headers.append(h)
1807 self.header = h
1807 self.header = h
1808
1808
1809 def addother(self, line):
1809 def addother(self, line):
1810 pass # 'other' lines are ignored
1810 pass # 'other' lines are ignored
1811
1811
1812 def finished(self):
1812 def finished(self):
1813 self.addcontext([])
1813 self.addcontext([])
1814 return self.headers
1814 return self.headers
1815
1815
1816 transitions = {
1816 transitions = {
1817 b'file': {
1817 b'file': {
1818 b'context': addcontext,
1818 b'context': addcontext,
1819 b'file': newfile,
1819 b'file': newfile,
1820 b'hunk': addhunk,
1820 b'hunk': addhunk,
1821 b'range': addrange,
1821 b'range': addrange,
1822 },
1822 },
1823 b'context': {
1823 b'context': {
1824 b'file': newfile,
1824 b'file': newfile,
1825 b'hunk': addhunk,
1825 b'hunk': addhunk,
1826 b'range': addrange,
1826 b'range': addrange,
1827 b'other': addother,
1827 b'other': addother,
1828 },
1828 },
1829 b'hunk': {
1829 b'hunk': {
1830 b'context': addcontext,
1830 b'context': addcontext,
1831 b'file': newfile,
1831 b'file': newfile,
1832 b'range': addrange,
1832 b'range': addrange,
1833 },
1833 },
1834 b'range': {b'context': addcontext, b'hunk': addhunk},
1834 b'range': {b'context': addcontext, b'hunk': addhunk},
1835 b'other': {b'other': addother},
1835 b'other': {b'other': addother},
1836 }
1836 }
1837
1837
1838 p = parser()
1838 p = parser()
1839 fp = stringio()
1839 fp = stringio()
1840 fp.write(b''.join(originalchunks))
1840 fp.write(b''.join(originalchunks))
1841 fp.seek(0)
1841 fp.seek(0)
1842
1842
1843 state = b'context'
1843 state = b'context'
1844 for newstate, data in scanpatch(fp):
1844 for newstate, data in scanpatch(fp):
1845 try:
1845 try:
1846 p.transitions[state][newstate](p, data)
1846 p.transitions[state][newstate](p, data)
1847 except KeyError:
1847 except KeyError:
1848 raise PatchError(
1848 raise PatchError(
1849 b'unhandled transition: %s -> %s' % (state, newstate)
1849 b'unhandled transition: %s -> %s' % (state, newstate)
1850 )
1850 )
1851 state = newstate
1851 state = newstate
1852 del fp
1852 del fp
1853 return p.finished()
1853 return p.finished()
1854
1854
1855
1855
1856 def pathtransform(path, strip, prefix):
1856 def pathtransform(path, strip, prefix):
1857 '''turn a path from a patch into a path suitable for the repository
1857 '''turn a path from a patch into a path suitable for the repository
1858
1858
1859 prefix, if not empty, is expected to be normalized with a / at the end.
1859 prefix, if not empty, is expected to be normalized with a / at the end.
1860
1860
1861 Returns (stripped components, path in repository).
1861 Returns (stripped components, path in repository).
1862
1862
1863 >>> pathtransform(b'a/b/c', 0, b'')
1863 >>> pathtransform(b'a/b/c', 0, b'')
1864 ('', 'a/b/c')
1864 ('', 'a/b/c')
1865 >>> pathtransform(b' a/b/c ', 0, b'')
1865 >>> pathtransform(b' a/b/c ', 0, b'')
1866 ('', ' a/b/c')
1866 ('', ' a/b/c')
1867 >>> pathtransform(b' a/b/c ', 2, b'')
1867 >>> pathtransform(b' a/b/c ', 2, b'')
1868 ('a/b/', 'c')
1868 ('a/b/', 'c')
1869 >>> pathtransform(b'a/b/c', 0, b'd/e/')
1869 >>> pathtransform(b'a/b/c', 0, b'd/e/')
1870 ('', 'd/e/a/b/c')
1870 ('', 'd/e/a/b/c')
1871 >>> pathtransform(b' a//b/c ', 2, b'd/e/')
1871 >>> pathtransform(b' a//b/c ', 2, b'd/e/')
1872 ('a//b/', 'd/e/c')
1872 ('a//b/', 'd/e/c')
1873 >>> pathtransform(b'a/b/c', 3, b'')
1873 >>> pathtransform(b'a/b/c', 3, b'')
1874 Traceback (most recent call last):
1874 Traceback (most recent call last):
1875 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1875 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1876 '''
1876 '''
1877 pathlen = len(path)
1877 pathlen = len(path)
1878 i = 0
1878 i = 0
1879 if strip == 0:
1879 if strip == 0:
1880 return b'', prefix + path.rstrip()
1880 return b'', prefix + path.rstrip()
1881 count = strip
1881 count = strip
1882 while count > 0:
1882 while count > 0:
1883 i = path.find(b'/', i)
1883 i = path.find(b'/', i)
1884 if i == -1:
1884 if i == -1:
1885 raise PatchError(
1885 raise PatchError(
1886 _(b"unable to strip away %d of %d dirs from %s")
1886 _(b"unable to strip away %d of %d dirs from %s")
1887 % (count, strip, path)
1887 % (count, strip, path)
1888 )
1888 )
1889 i += 1
1889 i += 1
1890 # consume '//' in the path
1890 # consume '//' in the path
1891 while i < pathlen - 1 and path[i : i + 1] == b'/':
1891 while i < pathlen - 1 and path[i : i + 1] == b'/':
1892 i += 1
1892 i += 1
1893 count -= 1
1893 count -= 1
1894 return path[:i].lstrip(), prefix + path[i:].rstrip()
1894 return path[:i].lstrip(), prefix + path[i:].rstrip()
1895
1895
1896
1896
1897 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
1897 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
1898 nulla = afile_orig == b"/dev/null"
1898 nulla = afile_orig == b"/dev/null"
1899 nullb = bfile_orig == b"/dev/null"
1899 nullb = bfile_orig == b"/dev/null"
1900 create = nulla and hunk.starta == 0 and hunk.lena == 0
1900 create = nulla and hunk.starta == 0 and hunk.lena == 0
1901 remove = nullb and hunk.startb == 0 and hunk.lenb == 0
1901 remove = nullb and hunk.startb == 0 and hunk.lenb == 0
1902 abase, afile = pathtransform(afile_orig, strip, prefix)
1902 abase, afile = pathtransform(afile_orig, strip, prefix)
1903 gooda = not nulla and backend.exists(afile)
1903 gooda = not nulla and backend.exists(afile)
1904 bbase, bfile = pathtransform(bfile_orig, strip, prefix)
1904 bbase, bfile = pathtransform(bfile_orig, strip, prefix)
1905 if afile == bfile:
1905 if afile == bfile:
1906 goodb = gooda
1906 goodb = gooda
1907 else:
1907 else:
1908 goodb = not nullb and backend.exists(bfile)
1908 goodb = not nullb and backend.exists(bfile)
1909 missing = not goodb and not gooda and not create
1909 missing = not goodb and not gooda and not create
1910
1910
1911 # some diff programs apparently produce patches where the afile is
1911 # some diff programs apparently produce patches where the afile is
1912 # not /dev/null, but afile starts with bfile
1912 # not /dev/null, but afile starts with bfile
1913 abasedir = afile[: afile.rfind(b'/') + 1]
1913 abasedir = afile[: afile.rfind(b'/') + 1]
1914 bbasedir = bfile[: bfile.rfind(b'/') + 1]
1914 bbasedir = bfile[: bfile.rfind(b'/') + 1]
1915 if (
1915 if (
1916 missing
1916 missing
1917 and abasedir == bbasedir
1917 and abasedir == bbasedir
1918 and afile.startswith(bfile)
1918 and afile.startswith(bfile)
1919 and hunk.starta == 0
1919 and hunk.starta == 0
1920 and hunk.lena == 0
1920 and hunk.lena == 0
1921 ):
1921 ):
1922 create = True
1922 create = True
1923 missing = False
1923 missing = False
1924
1924
1925 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
1925 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
1926 # diff is between a file and its backup. In this case, the original
1926 # diff is between a file and its backup. In this case, the original
1927 # file should be patched (see original mpatch code).
1927 # file should be patched (see original mpatch code).
1928 isbackup = abase == bbase and bfile.startswith(afile)
1928 isbackup = abase == bbase and bfile.startswith(afile)
1929 fname = None
1929 fname = None
1930 if not missing:
1930 if not missing:
1931 if gooda and goodb:
1931 if gooda and goodb:
1932 if isbackup:
1932 if isbackup:
1933 fname = afile
1933 fname = afile
1934 else:
1934 else:
1935 fname = bfile
1935 fname = bfile
1936 elif gooda:
1936 elif gooda:
1937 fname = afile
1937 fname = afile
1938
1938
1939 if not fname:
1939 if not fname:
1940 if not nullb:
1940 if not nullb:
1941 if isbackup:
1941 if isbackup:
1942 fname = afile
1942 fname = afile
1943 else:
1943 else:
1944 fname = bfile
1944 fname = bfile
1945 elif not nulla:
1945 elif not nulla:
1946 fname = afile
1946 fname = afile
1947 else:
1947 else:
1948 raise PatchError(_(b"undefined source and destination files"))
1948 raise PatchError(_(b"undefined source and destination files"))
1949
1949
1950 gp = patchmeta(fname)
1950 gp = patchmeta(fname)
1951 if create:
1951 if create:
1952 gp.op = b'ADD'
1952 gp.op = b'ADD'
1953 elif remove:
1953 elif remove:
1954 gp.op = b'DELETE'
1954 gp.op = b'DELETE'
1955 return gp
1955 return gp
1956
1956
1957
1957
1958 def scanpatch(fp):
1958 def scanpatch(fp):
1959 """like patch.iterhunks, but yield different events
1959 """like patch.iterhunks, but yield different events
1960
1960
1961 - ('file', [header_lines + fromfile + tofile])
1961 - ('file', [header_lines + fromfile + tofile])
1962 - ('context', [context_lines])
1962 - ('context', [context_lines])
1963 - ('hunk', [hunk_lines])
1963 - ('hunk', [hunk_lines])
1964 - ('range', (-start,len, +start,len, proc))
1964 - ('range', (-start,len, +start,len, proc))
1965 """
1965 """
1966 lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
1966 lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
1967 lr = linereader(fp)
1967 lr = linereader(fp)
1968
1968
1969 def scanwhile(first, p):
1969 def scanwhile(first, p):
1970 """scan lr while predicate holds"""
1970 """scan lr while predicate holds"""
1971 lines = [first]
1971 lines = [first]
1972 for line in iter(lr.readline, b''):
1972 for line in iter(lr.readline, b''):
1973 if p(line):
1973 if p(line):
1974 lines.append(line)
1974 lines.append(line)
1975 else:
1975 else:
1976 lr.push(line)
1976 lr.push(line)
1977 break
1977 break
1978 return lines
1978 return lines
1979
1979
1980 for line in iter(lr.readline, b''):
1980 for line in iter(lr.readline, b''):
1981 if line.startswith(b'diff --git a/') or line.startswith(b'diff -r '):
1981 if line.startswith(b'diff --git a/') or line.startswith(b'diff -r '):
1982
1982
1983 def notheader(line):
1983 def notheader(line):
1984 s = line.split(None, 1)
1984 s = line.split(None, 1)
1985 return not s or s[0] not in (b'---', b'diff')
1985 return not s or s[0] not in (b'---', b'diff')
1986
1986
1987 header = scanwhile(line, notheader)
1987 header = scanwhile(line, notheader)
1988 fromfile = lr.readline()
1988 fromfile = lr.readline()
1989 if fromfile.startswith(b'---'):
1989 if fromfile.startswith(b'---'):
1990 tofile = lr.readline()
1990 tofile = lr.readline()
1991 header += [fromfile, tofile]
1991 header += [fromfile, tofile]
1992 else:
1992 else:
1993 lr.push(fromfile)
1993 lr.push(fromfile)
1994 yield b'file', header
1994 yield b'file', header
1995 elif line.startswith(b' '):
1995 elif line.startswith(b' '):
1996 cs = (b' ', b'\\')
1996 cs = (b' ', b'\\')
1997 yield b'context', scanwhile(line, lambda l: l.startswith(cs))
1997 yield b'context', scanwhile(line, lambda l: l.startswith(cs))
1998 elif line.startswith((b'-', b'+')):
1998 elif line.startswith((b'-', b'+')):
1999 cs = (b'-', b'+', b'\\')
1999 cs = (b'-', b'+', b'\\')
2000 yield b'hunk', scanwhile(line, lambda l: l.startswith(cs))
2000 yield b'hunk', scanwhile(line, lambda l: l.startswith(cs))
2001 else:
2001 else:
2002 m = lines_re.match(line)
2002 m = lines_re.match(line)
2003 if m:
2003 if m:
2004 yield b'range', m.groups()
2004 yield b'range', m.groups()
2005 else:
2005 else:
2006 yield b'other', line
2006 yield b'other', line
2007
2007
2008
2008
2009 def scangitpatch(lr, firstline):
2009 def scangitpatch(lr, firstline):
2010 """
2010 """
2011 Git patches can emit:
2011 Git patches can emit:
2012 - rename a to b
2012 - rename a to b
2013 - change b
2013 - change b
2014 - copy a to c
2014 - copy a to c
2015 - change c
2015 - change c
2016
2016
2017 We cannot apply this sequence as-is, the renamed 'a' could not be
2017 We cannot apply this sequence as-is, the renamed 'a' could not be
2018 found for it would have been renamed already. And we cannot copy
2018 found for it would have been renamed already. And we cannot copy
2019 from 'b' instead because 'b' would have been changed already. So
2019 from 'b' instead because 'b' would have been changed already. So
2020 we scan the git patch for copy and rename commands so we can
2020 we scan the git patch for copy and rename commands so we can
2021 perform the copies ahead of time.
2021 perform the copies ahead of time.
2022 """
2022 """
2023 pos = 0
2023 pos = 0
2024 try:
2024 try:
2025 pos = lr.fp.tell()
2025 pos = lr.fp.tell()
2026 fp = lr.fp
2026 fp = lr.fp
2027 except IOError:
2027 except IOError:
2028 fp = stringio(lr.fp.read())
2028 fp = stringio(lr.fp.read())
2029 gitlr = linereader(fp)
2029 gitlr = linereader(fp)
2030 gitlr.push(firstline)
2030 gitlr.push(firstline)
2031 gitpatches = readgitpatch(gitlr)
2031 gitpatches = readgitpatch(gitlr)
2032 fp.seek(pos)
2032 fp.seek(pos)
2033 return gitpatches
2033 return gitpatches
2034
2034
2035
2035
2036 def iterhunks(fp):
2036 def iterhunks(fp):
2037 """Read a patch and yield the following events:
2037 """Read a patch and yield the following events:
2038 - ("file", afile, bfile, firsthunk): select a new target file.
2038 - ("file", afile, bfile, firsthunk): select a new target file.
2039 - ("hunk", hunk): a new hunk is ready to be applied, follows a
2039 - ("hunk", hunk): a new hunk is ready to be applied, follows a
2040 "file" event.
2040 "file" event.
2041 - ("git", gitchanges): current diff is in git format, gitchanges
2041 - ("git", gitchanges): current diff is in git format, gitchanges
2042 maps filenames to gitpatch records. Unique event.
2042 maps filenames to gitpatch records. Unique event.
2043 """
2043 """
2044 afile = b""
2044 afile = b""
2045 bfile = b""
2045 bfile = b""
2046 state = None
2046 state = None
2047 hunknum = 0
2047 hunknum = 0
2048 emitfile = newfile = False
2048 emitfile = newfile = False
2049 gitpatches = None
2049 gitpatches = None
2050
2050
2051 # our states
2051 # our states
2052 BFILE = 1
2052 BFILE = 1
2053 context = None
2053 context = None
2054 lr = linereader(fp)
2054 lr = linereader(fp)
2055
2055
2056 for x in iter(lr.readline, b''):
2056 for x in iter(lr.readline, b''):
2057 if state == BFILE and (
2057 if state == BFILE and (
2058 (not context and x.startswith(b'@'))
2058 (not context and x.startswith(b'@'))
2059 or (context is not False and x.startswith(b'***************'))
2059 or (context is not False and x.startswith(b'***************'))
2060 or x.startswith(b'GIT binary patch')
2060 or x.startswith(b'GIT binary patch')
2061 ):
2061 ):
2062 gp = None
2062 gp = None
2063 if gitpatches and gitpatches[-1].ispatching(afile, bfile):
2063 if gitpatches and gitpatches[-1].ispatching(afile, bfile):
2064 gp = gitpatches.pop()
2064 gp = gitpatches.pop()
2065 if x.startswith(b'GIT binary patch'):
2065 if x.startswith(b'GIT binary patch'):
2066 h = binhunk(lr, gp.path)
2066 h = binhunk(lr, gp.path)
2067 else:
2067 else:
2068 if context is None and x.startswith(b'***************'):
2068 if context is None and x.startswith(b'***************'):
2069 context = True
2069 context = True
2070 h = hunk(x, hunknum + 1, lr, context)
2070 h = hunk(x, hunknum + 1, lr, context)
2071 hunknum += 1
2071 hunknum += 1
2072 if emitfile:
2072 if emitfile:
2073 emitfile = False
2073 emitfile = False
2074 yield b'file', (afile, bfile, h, gp and gp.copy() or None)
2074 yield b'file', (afile, bfile, h, gp and gp.copy() or None)
2075 yield b'hunk', h
2075 yield b'hunk', h
2076 elif x.startswith(b'diff --git a/'):
2076 elif x.startswith(b'diff --git a/'):
2077 m = gitre.match(x.rstrip(b' \r\n'))
2077 m = gitre.match(x.rstrip(b' \r\n'))
2078 if not m:
2078 if not m:
2079 continue
2079 continue
2080 if gitpatches is None:
2080 if gitpatches is None:
2081 # scan whole input for git metadata
2081 # scan whole input for git metadata
2082 gitpatches = scangitpatch(lr, x)
2082 gitpatches = scangitpatch(lr, x)
2083 yield b'git', [
2083 yield b'git', [
2084 g.copy() for g in gitpatches if g.op in (b'COPY', b'RENAME')
2084 g.copy() for g in gitpatches if g.op in (b'COPY', b'RENAME')
2085 ]
2085 ]
2086 gitpatches.reverse()
2086 gitpatches.reverse()
2087 afile = b'a/' + m.group(1)
2087 afile = b'a/' + m.group(1)
2088 bfile = b'b/' + m.group(2)
2088 bfile = b'b/' + m.group(2)
2089 while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
2089 while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
2090 gp = gitpatches.pop()
2090 gp = gitpatches.pop()
2091 yield b'file', (
2091 yield b'file', (
2092 b'a/' + gp.path,
2092 b'a/' + gp.path,
2093 b'b/' + gp.path,
2093 b'b/' + gp.path,
2094 None,
2094 None,
2095 gp.copy(),
2095 gp.copy(),
2096 )
2096 )
2097 if not gitpatches:
2097 if not gitpatches:
2098 raise PatchError(
2098 raise PatchError(
2099 _(b'failed to synchronize metadata for "%s"') % afile[2:]
2099 _(b'failed to synchronize metadata for "%s"') % afile[2:]
2100 )
2100 )
2101 newfile = True
2101 newfile = True
2102 elif x.startswith(b'---'):
2102 elif x.startswith(b'---'):
2103 # check for a unified diff
2103 # check for a unified diff
2104 l2 = lr.readline()
2104 l2 = lr.readline()
2105 if not l2.startswith(b'+++'):
2105 if not l2.startswith(b'+++'):
2106 lr.push(l2)
2106 lr.push(l2)
2107 continue
2107 continue
2108 newfile = True
2108 newfile = True
2109 context = False
2109 context = False
2110 afile = parsefilename(x)
2110 afile = parsefilename(x)
2111 bfile = parsefilename(l2)
2111 bfile = parsefilename(l2)
2112 elif x.startswith(b'***'):
2112 elif x.startswith(b'***'):
2113 # check for a context diff
2113 # check for a context diff
2114 l2 = lr.readline()
2114 l2 = lr.readline()
2115 if not l2.startswith(b'---'):
2115 if not l2.startswith(b'---'):
2116 lr.push(l2)
2116 lr.push(l2)
2117 continue
2117 continue
2118 l3 = lr.readline()
2118 l3 = lr.readline()
2119 lr.push(l3)
2119 lr.push(l3)
2120 if not l3.startswith(b"***************"):
2120 if not l3.startswith(b"***************"):
2121 lr.push(l2)
2121 lr.push(l2)
2122 continue
2122 continue
2123 newfile = True
2123 newfile = True
2124 context = True
2124 context = True
2125 afile = parsefilename(x)
2125 afile = parsefilename(x)
2126 bfile = parsefilename(l2)
2126 bfile = parsefilename(l2)
2127
2127
2128 if newfile:
2128 if newfile:
2129 newfile = False
2129 newfile = False
2130 emitfile = True
2130 emitfile = True
2131 state = BFILE
2131 state = BFILE
2132 hunknum = 0
2132 hunknum = 0
2133
2133
2134 while gitpatches:
2134 while gitpatches:
2135 gp = gitpatches.pop()
2135 gp = gitpatches.pop()
2136 yield b'file', (b'a/' + gp.path, b'b/' + gp.path, None, gp.copy())
2136 yield b'file', (b'a/' + gp.path, b'b/' + gp.path, None, gp.copy())
2137
2137
2138
2138
2139 def applybindelta(binchunk, data):
2139 def applybindelta(binchunk, data):
2140 """Apply a binary delta hunk
2140 """Apply a binary delta hunk
2141 The algorithm used is the algorithm from git's patch-delta.c
2141 The algorithm used is the algorithm from git's patch-delta.c
2142 """
2142 """
2143
2143
2144 def deltahead(binchunk):
2144 def deltahead(binchunk):
2145 i = 0
2145 i = 0
2146 for c in pycompat.bytestr(binchunk):
2146 for c in pycompat.bytestr(binchunk):
2147 i += 1
2147 i += 1
2148 if not (ord(c) & 0x80):
2148 if not (ord(c) & 0x80):
2149 return i
2149 return i
2150 return i
2150 return i
2151
2151
2152 out = b""
2152 out = b""
2153 s = deltahead(binchunk)
2153 s = deltahead(binchunk)
2154 binchunk = binchunk[s:]
2154 binchunk = binchunk[s:]
2155 s = deltahead(binchunk)
2155 s = deltahead(binchunk)
2156 binchunk = binchunk[s:]
2156 binchunk = binchunk[s:]
2157 i = 0
2157 i = 0
2158 while i < len(binchunk):
2158 while i < len(binchunk):
2159 cmd = ord(binchunk[i : i + 1])
2159 cmd = ord(binchunk[i : i + 1])
2160 i += 1
2160 i += 1
2161 if cmd & 0x80:
2161 if cmd & 0x80:
2162 offset = 0
2162 offset = 0
2163 size = 0
2163 size = 0
2164 if cmd & 0x01:
2164 if cmd & 0x01:
2165 offset = ord(binchunk[i : i + 1])
2165 offset = ord(binchunk[i : i + 1])
2166 i += 1
2166 i += 1
2167 if cmd & 0x02:
2167 if cmd & 0x02:
2168 offset |= ord(binchunk[i : i + 1]) << 8
2168 offset |= ord(binchunk[i : i + 1]) << 8
2169 i += 1
2169 i += 1
2170 if cmd & 0x04:
2170 if cmd & 0x04:
2171 offset |= ord(binchunk[i : i + 1]) << 16
2171 offset |= ord(binchunk[i : i + 1]) << 16
2172 i += 1
2172 i += 1
2173 if cmd & 0x08:
2173 if cmd & 0x08:
2174 offset |= ord(binchunk[i : i + 1]) << 24
2174 offset |= ord(binchunk[i : i + 1]) << 24
2175 i += 1
2175 i += 1
2176 if cmd & 0x10:
2176 if cmd & 0x10:
2177 size = ord(binchunk[i : i + 1])
2177 size = ord(binchunk[i : i + 1])
2178 i += 1
2178 i += 1
2179 if cmd & 0x20:
2179 if cmd & 0x20:
2180 size |= ord(binchunk[i : i + 1]) << 8
2180 size |= ord(binchunk[i : i + 1]) << 8
2181 i += 1
2181 i += 1
2182 if cmd & 0x40:
2182 if cmd & 0x40:
2183 size |= ord(binchunk[i : i + 1]) << 16
2183 size |= ord(binchunk[i : i + 1]) << 16
2184 i += 1
2184 i += 1
2185 if size == 0:
2185 if size == 0:
2186 size = 0x10000
2186 size = 0x10000
2187 offset_end = offset + size
2187 offset_end = offset + size
2188 out += data[offset:offset_end]
2188 out += data[offset:offset_end]
2189 elif cmd != 0:
2189 elif cmd != 0:
2190 offset_end = i + cmd
2190 offset_end = i + cmd
2191 out += binchunk[i:offset_end]
2191 out += binchunk[i:offset_end]
2192 i += cmd
2192 i += cmd
2193 else:
2193 else:
2194 raise PatchError(_(b'unexpected delta opcode 0'))
2194 raise PatchError(_(b'unexpected delta opcode 0'))
2195 return out
2195 return out
2196
2196
2197
2197
2198 def applydiff(ui, fp, backend, store, strip=1, prefix=b'', eolmode=b'strict'):
2198 def applydiff(ui, fp, backend, store, strip=1, prefix=b'', eolmode=b'strict'):
2199 """Reads a patch from fp and tries to apply it.
2199 """Reads a patch from fp and tries to apply it.
2200
2200
2201 Returns 0 for a clean patch, -1 if any rejects were found and 1 if
2201 Returns 0 for a clean patch, -1 if any rejects were found and 1 if
2202 there was any fuzz.
2202 there was any fuzz.
2203
2203
2204 If 'eolmode' is 'strict', the patch content and patched file are
2204 If 'eolmode' is 'strict', the patch content and patched file are
2205 read in binary mode. Otherwise, line endings are ignored when
2205 read in binary mode. Otherwise, line endings are ignored when
2206 patching then normalized according to 'eolmode'.
2206 patching then normalized according to 'eolmode'.
2207 """
2207 """
2208 return _applydiff(
2208 return _applydiff(
2209 ui,
2209 ui,
2210 fp,
2210 fp,
2211 patchfile,
2211 patchfile,
2212 backend,
2212 backend,
2213 store,
2213 store,
2214 strip=strip,
2214 strip=strip,
2215 prefix=prefix,
2215 prefix=prefix,
2216 eolmode=eolmode,
2216 eolmode=eolmode,
2217 )
2217 )
2218
2218
2219
2219
2220 def _canonprefix(repo, prefix):
2220 def _canonprefix(repo, prefix):
2221 if prefix:
2221 if prefix:
2222 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
2222 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
2223 if prefix != b'':
2223 if prefix != b'':
2224 prefix += b'/'
2224 prefix += b'/'
2225 return prefix
2225 return prefix
2226
2226
2227
2227
2228 def _applydiff(
2228 def _applydiff(
2229 ui, fp, patcher, backend, store, strip=1, prefix=b'', eolmode=b'strict'
2229 ui, fp, patcher, backend, store, strip=1, prefix=b'', eolmode=b'strict'
2230 ):
2230 ):
2231 prefix = _canonprefix(backend.repo, prefix)
2231 prefix = _canonprefix(backend.repo, prefix)
2232
2232
2233 def pstrip(p):
2233 def pstrip(p):
2234 return pathtransform(p, strip - 1, prefix)[1]
2234 return pathtransform(p, strip - 1, prefix)[1]
2235
2235
2236 rejects = 0
2236 rejects = 0
2237 err = 0
2237 err = 0
2238 current_file = None
2238 current_file = None
2239
2239
2240 for state, values in iterhunks(fp):
2240 for state, values in iterhunks(fp):
2241 if state == b'hunk':
2241 if state == b'hunk':
2242 if not current_file:
2242 if not current_file:
2243 continue
2243 continue
2244 ret = current_file.apply(values)
2244 ret = current_file.apply(values)
2245 if ret > 0:
2245 if ret > 0:
2246 err = 1
2246 err = 1
2247 elif state == b'file':
2247 elif state == b'file':
2248 if current_file:
2248 if current_file:
2249 rejects += current_file.close()
2249 rejects += current_file.close()
2250 current_file = None
2250 current_file = None
2251 afile, bfile, first_hunk, gp = values
2251 afile, bfile, first_hunk, gp = values
2252 if gp:
2252 if gp:
2253 gp.path = pstrip(gp.path)
2253 gp.path = pstrip(gp.path)
2254 if gp.oldpath:
2254 if gp.oldpath:
2255 gp.oldpath = pstrip(gp.oldpath)
2255 gp.oldpath = pstrip(gp.oldpath)
2256 else:
2256 else:
2257 gp = makepatchmeta(
2257 gp = makepatchmeta(
2258 backend, afile, bfile, first_hunk, strip, prefix
2258 backend, afile, bfile, first_hunk, strip, prefix
2259 )
2259 )
2260 if gp.op == b'RENAME':
2260 if gp.op == b'RENAME':
2261 backend.unlink(gp.oldpath)
2261 backend.unlink(gp.oldpath)
2262 if not first_hunk:
2262 if not first_hunk:
2263 if gp.op == b'DELETE':
2263 if gp.op == b'DELETE':
2264 backend.unlink(gp.path)
2264 backend.unlink(gp.path)
2265 continue
2265 continue
2266 data, mode = None, None
2266 data, mode = None, None
2267 if gp.op in (b'RENAME', b'COPY'):
2267 if gp.op in (b'RENAME', b'COPY'):
2268 data, mode = store.getfile(gp.oldpath)[:2]
2268 data, mode = store.getfile(gp.oldpath)[:2]
2269 if data is None:
2269 if data is None:
2270 # This means that the old path does not exist
2270 # This means that the old path does not exist
2271 raise PatchError(
2271 raise PatchError(
2272 _(b"source file '%s' does not exist") % gp.oldpath
2272 _(b"source file '%s' does not exist") % gp.oldpath
2273 )
2273 )
2274 if gp.mode:
2274 if gp.mode:
2275 mode = gp.mode
2275 mode = gp.mode
2276 if gp.op == b'ADD':
2276 if gp.op == b'ADD':
2277 # Added files without content have no hunk and
2277 # Added files without content have no hunk and
2278 # must be created
2278 # must be created
2279 data = b''
2279 data = b''
2280 if data or mode:
2280 if data or mode:
2281 if gp.op in (b'ADD', b'RENAME', b'COPY') and backend.exists(
2281 if gp.op in (b'ADD', b'RENAME', b'COPY') and backend.exists(
2282 gp.path
2282 gp.path
2283 ):
2283 ):
2284 raise PatchError(
2284 raise PatchError(
2285 _(
2285 _(
2286 b"cannot create %s: destination "
2286 b"cannot create %s: destination "
2287 b"already exists"
2287 b"already exists"
2288 )
2288 )
2289 % gp.path
2289 % gp.path
2290 )
2290 )
2291 backend.setfile(gp.path, data, mode, gp.oldpath)
2291 backend.setfile(gp.path, data, mode, gp.oldpath)
2292 continue
2292 continue
2293 try:
2293 try:
2294 current_file = patcher(ui, gp, backend, store, eolmode=eolmode)
2294 current_file = patcher(ui, gp, backend, store, eolmode=eolmode)
2295 except PatchError as inst:
2295 except PatchError as inst:
2296 ui.warn(stringutil.forcebytestr(inst) + b'\n')
2296 ui.warn(stringutil.forcebytestr(inst) + b'\n')
2297 current_file = None
2297 current_file = None
2298 rejects += 1
2298 rejects += 1
2299 continue
2299 continue
2300 elif state == b'git':
2300 elif state == b'git':
2301 for gp in values:
2301 for gp in values:
2302 path = pstrip(gp.oldpath)
2302 path = pstrip(gp.oldpath)
2303 data, mode = backend.getfile(path)
2303 data, mode = backend.getfile(path)
2304 if data is None:
2304 if data is None:
2305 # The error ignored here will trigger a getfile()
2305 # The error ignored here will trigger a getfile()
2306 # error in a place more appropriate for error
2306 # error in a place more appropriate for error
2307 # handling, and will not interrupt the patching
2307 # handling, and will not interrupt the patching
2308 # process.
2308 # process.
2309 pass
2309 pass
2310 else:
2310 else:
2311 store.setfile(path, data, mode)
2311 store.setfile(path, data, mode)
2312 else:
2312 else:
2313 raise error.Abort(_(b'unsupported parser state: %s') % state)
2313 raise error.Abort(_(b'unsupported parser state: %s') % state)
2314
2314
2315 if current_file:
2315 if current_file:
2316 rejects += current_file.close()
2316 rejects += current_file.close()
2317
2317
2318 if rejects:
2318 if rejects:
2319 return -1
2319 return -1
2320 return err
2320 return err
2321
2321
2322
2322
2323 def _externalpatch(ui, repo, patcher, patchname, strip, files, similarity):
2323 def _externalpatch(ui, repo, patcher, patchname, strip, files, similarity):
2324 """use <patcher> to apply <patchname> to the working directory.
2324 """use <patcher> to apply <patchname> to the working directory.
2325 returns whether patch was applied with fuzz factor."""
2325 returns whether patch was applied with fuzz factor."""
2326
2326
2327 fuzz = False
2327 fuzz = False
2328 args = []
2328 args = []
2329 cwd = repo.root
2329 cwd = repo.root
2330 if cwd:
2330 if cwd:
2331 args.append(b'-d %s' % procutil.shellquote(cwd))
2331 args.append(b'-d %s' % procutil.shellquote(cwd))
2332 cmd = b'%s %s -p%d < %s' % (
2332 cmd = b'%s %s -p%d < %s' % (
2333 patcher,
2333 patcher,
2334 b' '.join(args),
2334 b' '.join(args),
2335 strip,
2335 strip,
2336 procutil.shellquote(patchname),
2336 procutil.shellquote(patchname),
2337 )
2337 )
2338 ui.debug(b'Using external patch tool: %s\n' % cmd)
2338 ui.debug(b'Using external patch tool: %s\n' % cmd)
2339 fp = procutil.popen(cmd, b'rb')
2339 fp = procutil.popen(cmd, b'rb')
2340 try:
2340 try:
2341 for line in util.iterfile(fp):
2341 for line in util.iterfile(fp):
2342 line = line.rstrip()
2342 line = line.rstrip()
2343 ui.note(line + b'\n')
2343 ui.note(line + b'\n')
2344 if line.startswith(b'patching file '):
2344 if line.startswith(b'patching file '):
2345 pf = util.parsepatchoutput(line)
2345 pf = util.parsepatchoutput(line)
2346 printed_file = False
2346 printed_file = False
2347 files.add(pf)
2347 files.add(pf)
2348 elif line.find(b'with fuzz') >= 0:
2348 elif line.find(b'with fuzz') >= 0:
2349 fuzz = True
2349 fuzz = True
2350 if not printed_file:
2350 if not printed_file:
2351 ui.warn(pf + b'\n')
2351 ui.warn(pf + b'\n')
2352 printed_file = True
2352 printed_file = True
2353 ui.warn(line + b'\n')
2353 ui.warn(line + b'\n')
2354 elif line.find(b'saving rejects to file') >= 0:
2354 elif line.find(b'saving rejects to file') >= 0:
2355 ui.warn(line + b'\n')
2355 ui.warn(line + b'\n')
2356 elif line.find(b'FAILED') >= 0:
2356 elif line.find(b'FAILED') >= 0:
2357 if not printed_file:
2357 if not printed_file:
2358 ui.warn(pf + b'\n')
2358 ui.warn(pf + b'\n')
2359 printed_file = True
2359 printed_file = True
2360 ui.warn(line + b'\n')
2360 ui.warn(line + b'\n')
2361 finally:
2361 finally:
2362 if files:
2362 if files:
2363 scmutil.marktouched(repo, files, similarity)
2363 scmutil.marktouched(repo, files, similarity)
2364 code = fp.close()
2364 code = fp.close()
2365 if code:
2365 if code:
2366 raise PatchError(
2366 raise PatchError(
2367 _(b"patch command failed: %s") % procutil.explainexit(code)
2367 _(b"patch command failed: %s") % procutil.explainexit(code)
2368 )
2368 )
2369 return fuzz
2369 return fuzz
2370
2370
2371
2371
2372 def patchbackend(
2372 def patchbackend(
2373 ui, backend, patchobj, strip, prefix, files=None, eolmode=b'strict'
2373 ui, backend, patchobj, strip, prefix, files=None, eolmode=b'strict'
2374 ):
2374 ):
2375 if files is None:
2375 if files is None:
2376 files = set()
2376 files = set()
2377 if eolmode is None:
2377 if eolmode is None:
2378 eolmode = ui.config(b'patch', b'eol')
2378 eolmode = ui.config(b'patch', b'eol')
2379 if eolmode.lower() not in eolmodes:
2379 if eolmode.lower() not in eolmodes:
2380 raise error.Abort(_(b'unsupported line endings type: %s') % eolmode)
2380 raise error.Abort(_(b'unsupported line endings type: %s') % eolmode)
2381 eolmode = eolmode.lower()
2381 eolmode = eolmode.lower()
2382
2382
2383 store = filestore()
2383 store = filestore()
2384 try:
2384 try:
2385 fp = open(patchobj, b'rb')
2385 fp = open(patchobj, b'rb')
2386 except TypeError:
2386 except TypeError:
2387 fp = patchobj
2387 fp = patchobj
2388 try:
2388 try:
2389 ret = applydiff(
2389 ret = applydiff(
2390 ui, fp, backend, store, strip=strip, prefix=prefix, eolmode=eolmode
2390 ui, fp, backend, store, strip=strip, prefix=prefix, eolmode=eolmode
2391 )
2391 )
2392 finally:
2392 finally:
2393 if fp != patchobj:
2393 if fp != patchobj:
2394 fp.close()
2394 fp.close()
2395 files.update(backend.close())
2395 files.update(backend.close())
2396 store.close()
2396 store.close()
2397 if ret < 0:
2397 if ret < 0:
2398 raise PatchError(_(b'patch failed to apply'))
2398 raise PatchError(_(b'patch failed to apply'))
2399 return ret > 0
2399 return ret > 0
2400
2400
2401
2401
2402 def internalpatch(
2402 def internalpatch(
2403 ui,
2403 ui,
2404 repo,
2404 repo,
2405 patchobj,
2405 patchobj,
2406 strip,
2406 strip,
2407 prefix=b'',
2407 prefix=b'',
2408 files=None,
2408 files=None,
2409 eolmode=b'strict',
2409 eolmode=b'strict',
2410 similarity=0,
2410 similarity=0,
2411 ):
2411 ):
2412 """use builtin patch to apply <patchobj> to the working directory.
2412 """use builtin patch to apply <patchobj> to the working directory.
2413 returns whether patch was applied with fuzz factor."""
2413 returns whether patch was applied with fuzz factor."""
2414 backend = workingbackend(ui, repo, similarity)
2414 backend = workingbackend(ui, repo, similarity)
2415 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2415 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2416
2416
2417
2417
2418 def patchrepo(
2418 def patchrepo(
2419 ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode=b'strict'
2419 ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode=b'strict'
2420 ):
2420 ):
2421 backend = repobackend(ui, repo, ctx, store)
2421 backend = repobackend(ui, repo, ctx, store)
2422 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2422 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2423
2423
2424
2424
2425 def patch(
2425 def patch(
2426 ui,
2426 ui,
2427 repo,
2427 repo,
2428 patchname,
2428 patchname,
2429 strip=1,
2429 strip=1,
2430 prefix=b'',
2430 prefix=b'',
2431 files=None,
2431 files=None,
2432 eolmode=b'strict',
2432 eolmode=b'strict',
2433 similarity=0,
2433 similarity=0,
2434 ):
2434 ):
2435 """Apply <patchname> to the working directory.
2435 """Apply <patchname> to the working directory.
2436
2436
2437 'eolmode' specifies how end of lines should be handled. It can be:
2437 'eolmode' specifies how end of lines should be handled. It can be:
2438 - 'strict': inputs are read in binary mode, EOLs are preserved
2438 - 'strict': inputs are read in binary mode, EOLs are preserved
2439 - 'crlf': EOLs are ignored when patching and reset to CRLF
2439 - 'crlf': EOLs are ignored when patching and reset to CRLF
2440 - 'lf': EOLs are ignored when patching and reset to LF
2440 - 'lf': EOLs are ignored when patching and reset to LF
2441 - None: get it from user settings, default to 'strict'
2441 - None: get it from user settings, default to 'strict'
2442 'eolmode' is ignored when using an external patcher program.
2442 'eolmode' is ignored when using an external patcher program.
2443
2443
2444 Returns whether patch was applied with fuzz factor.
2444 Returns whether patch was applied with fuzz factor.
2445 """
2445 """
2446 patcher = ui.config(b'ui', b'patch')
2446 patcher = ui.config(b'ui', b'patch')
2447 if files is None:
2447 if files is None:
2448 files = set()
2448 files = set()
2449 if patcher:
2449 if patcher:
2450 return _externalpatch(
2450 return _externalpatch(
2451 ui, repo, patcher, patchname, strip, files, similarity
2451 ui, repo, patcher, patchname, strip, files, similarity
2452 )
2452 )
2453 return internalpatch(
2453 return internalpatch(
2454 ui, repo, patchname, strip, prefix, files, eolmode, similarity
2454 ui, repo, patchname, strip, prefix, files, eolmode, similarity
2455 )
2455 )
2456
2456
2457
2457
2458 def changedfiles(ui, repo, patchpath, strip=1, prefix=b''):
2458 def changedfiles(ui, repo, patchpath, strip=1, prefix=b''):
2459 backend = fsbackend(ui, repo.root)
2459 backend = fsbackend(ui, repo.root)
2460 prefix = _canonprefix(repo, prefix)
2460 prefix = _canonprefix(repo, prefix)
2461 with open(patchpath, b'rb') as fp:
2461 with open(patchpath, b'rb') as fp:
2462 changed = set()
2462 changed = set()
2463 for state, values in iterhunks(fp):
2463 for state, values in iterhunks(fp):
2464 if state == b'file':
2464 if state == b'file':
2465 afile, bfile, first_hunk, gp = values
2465 afile, bfile, first_hunk, gp = values
2466 if gp:
2466 if gp:
2467 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
2467 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
2468 if gp.oldpath:
2468 if gp.oldpath:
2469 gp.oldpath = pathtransform(
2469 gp.oldpath = pathtransform(
2470 gp.oldpath, strip - 1, prefix
2470 gp.oldpath, strip - 1, prefix
2471 )[1]
2471 )[1]
2472 else:
2472 else:
2473 gp = makepatchmeta(
2473 gp = makepatchmeta(
2474 backend, afile, bfile, first_hunk, strip, prefix
2474 backend, afile, bfile, first_hunk, strip, prefix
2475 )
2475 )
2476 changed.add(gp.path)
2476 changed.add(gp.path)
2477 if gp.op == b'RENAME':
2477 if gp.op == b'RENAME':
2478 changed.add(gp.oldpath)
2478 changed.add(gp.oldpath)
2479 elif state not in (b'hunk', b'git'):
2479 elif state not in (b'hunk', b'git'):
2480 raise error.Abort(_(b'unsupported parser state: %s') % state)
2480 raise error.Abort(_(b'unsupported parser state: %s') % state)
2481 return changed
2481 return changed
2482
2482
2483
2483
2484 class GitDiffRequired(Exception):
2484 class GitDiffRequired(Exception):
2485 pass
2485 pass
2486
2486
2487
2487
2488 diffopts = diffutil.diffallopts
2488 diffopts = diffutil.diffallopts
2489 diffallopts = diffutil.diffallopts
2489 diffallopts = diffutil.diffallopts
2490 difffeatureopts = diffutil.difffeatureopts
2490 difffeatureopts = diffutil.difffeatureopts
2491
2491
2492
2492
2493 def diff(
2493 def diff(
2494 repo,
2494 repo,
2495 node1=None,
2495 node1=None,
2496 node2=None,
2496 node2=None,
2497 match=None,
2497 match=None,
2498 changes=None,
2498 changes=None,
2499 opts=None,
2499 opts=None,
2500 losedatafn=None,
2500 losedatafn=None,
2501 pathfn=None,
2501 pathfn=None,
2502 copy=None,
2502 copy=None,
2503 copysourcematch=None,
2503 copysourcematch=None,
2504 hunksfilterfn=None,
2504 hunksfilterfn=None,
2505 ):
2505 ):
2506 '''yields diff of changes to files between two nodes, or node and
2506 '''yields diff of changes to files between two nodes, or node and
2507 working directory.
2507 working directory.
2508
2508
2509 if node1 is None, use first dirstate parent instead.
2509 if node1 is None, use first dirstate parent instead.
2510 if node2 is None, compare node1 with working directory.
2510 if node2 is None, compare node1 with working directory.
2511
2511
2512 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2512 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2513 every time some change cannot be represented with the current
2513 every time some change cannot be represented with the current
2514 patch format. Return False to upgrade to git patch format, True to
2514 patch format. Return False to upgrade to git patch format, True to
2515 accept the loss or raise an exception to abort the diff. It is
2515 accept the loss or raise an exception to abort the diff. It is
2516 called with the name of current file being diffed as 'fn'. If set
2516 called with the name of current file being diffed as 'fn'. If set
2517 to None, patches will always be upgraded to git format when
2517 to None, patches will always be upgraded to git format when
2518 necessary.
2518 necessary.
2519
2519
2520 prefix is a filename prefix that is prepended to all filenames on
2520 prefix is a filename prefix that is prepended to all filenames on
2521 display (used for subrepos).
2521 display (used for subrepos).
2522
2522
2523 relroot, if not empty, must be normalized with a trailing /. Any match
2523 relroot, if not empty, must be normalized with a trailing /. Any match
2524 patterns that fall outside it will be ignored.
2524 patterns that fall outside it will be ignored.
2525
2525
2526 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2526 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2527 information.
2527 information.
2528
2528
2529 if copysourcematch is not None, then copy sources will be filtered by this
2529 if copysourcematch is not None, then copy sources will be filtered by this
2530 matcher
2530 matcher
2531
2531
2532 hunksfilterfn, if not None, should be a function taking a filectx and
2532 hunksfilterfn, if not None, should be a function taking a filectx and
2533 hunks generator that may yield filtered hunks.
2533 hunks generator that may yield filtered hunks.
2534 '''
2534 '''
2535 if not node1 and not node2:
2535 if not node1 and not node2:
2536 node1 = repo.dirstate.p1()
2536 node1 = repo.dirstate.p1()
2537
2537
2538 ctx1 = repo[node1]
2538 ctx1 = repo[node1]
2539 ctx2 = repo[node2]
2539 ctx2 = repo[node2]
2540
2540
2541 for fctx1, fctx2, hdr, hunks in diffhunks(
2541 for fctx1, fctx2, hdr, hunks in diffhunks(
2542 repo,
2542 repo,
2543 ctx1=ctx1,
2543 ctx1=ctx1,
2544 ctx2=ctx2,
2544 ctx2=ctx2,
2545 match=match,
2545 match=match,
2546 changes=changes,
2546 changes=changes,
2547 opts=opts,
2547 opts=opts,
2548 losedatafn=losedatafn,
2548 losedatafn=losedatafn,
2549 pathfn=pathfn,
2549 pathfn=pathfn,
2550 copy=copy,
2550 copy=copy,
2551 copysourcematch=copysourcematch,
2551 copysourcematch=copysourcematch,
2552 ):
2552 ):
2553 if hunksfilterfn is not None:
2553 if hunksfilterfn is not None:
2554 # If the file has been removed, fctx2 is None; but this should
2554 # If the file has been removed, fctx2 is None; but this should
2555 # not occur here since we catch removed files early in
2555 # not occur here since we catch removed files early in
2556 # logcmdutil.getlinerangerevs() for 'hg log -L'.
2556 # logcmdutil.getlinerangerevs() for 'hg log -L'.
2557 assert (
2557 assert (
2558 fctx2 is not None
2558 fctx2 is not None
2559 ), b'fctx2 unexpectly None in diff hunks filtering'
2559 ), b'fctx2 unexpectly None in diff hunks filtering'
2560 hunks = hunksfilterfn(fctx2, hunks)
2560 hunks = hunksfilterfn(fctx2, hunks)
2561 text = b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2561 text = b''.join(b''.join(hlines) for hrange, hlines in hunks)
2562 if hdr and (text or len(hdr) > 1):
2562 if hdr and (text or len(hdr) > 1):
2563 yield b'\n'.join(hdr) + b'\n'
2563 yield b'\n'.join(hdr) + b'\n'
2564 if text:
2564 if text:
2565 yield text
2565 yield text
2566
2566
2567
2567
2568 def diffhunks(
2568 def diffhunks(
2569 repo,
2569 repo,
2570 ctx1,
2570 ctx1,
2571 ctx2,
2571 ctx2,
2572 match=None,
2572 match=None,
2573 changes=None,
2573 changes=None,
2574 opts=None,
2574 opts=None,
2575 losedatafn=None,
2575 losedatafn=None,
2576 pathfn=None,
2576 pathfn=None,
2577 copy=None,
2577 copy=None,
2578 copysourcematch=None,
2578 copysourcematch=None,
2579 ):
2579 ):
2580 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2580 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2581 where `header` is a list of diff headers and `hunks` is an iterable of
2581 where `header` is a list of diff headers and `hunks` is an iterable of
2582 (`hunkrange`, `hunklines`) tuples.
2582 (`hunkrange`, `hunklines`) tuples.
2583
2583
2584 See diff() for the meaning of parameters.
2584 See diff() for the meaning of parameters.
2585 """
2585 """
2586
2586
2587 if opts is None:
2587 if opts is None:
2588 opts = mdiff.defaultopts
2588 opts = mdiff.defaultopts
2589
2589
2590 def lrugetfilectx():
2590 def lrugetfilectx():
2591 cache = {}
2591 cache = {}
2592 order = collections.deque()
2592 order = collections.deque()
2593
2593
2594 def getfilectx(f, ctx):
2594 def getfilectx(f, ctx):
2595 fctx = ctx.filectx(f, filelog=cache.get(f))
2595 fctx = ctx.filectx(f, filelog=cache.get(f))
2596 if f not in cache:
2596 if f not in cache:
2597 if len(cache) > 20:
2597 if len(cache) > 20:
2598 del cache[order.popleft()]
2598 del cache[order.popleft()]
2599 cache[f] = fctx.filelog()
2599 cache[f] = fctx.filelog()
2600 else:
2600 else:
2601 order.remove(f)
2601 order.remove(f)
2602 order.append(f)
2602 order.append(f)
2603 return fctx
2603 return fctx
2604
2604
2605 return getfilectx
2605 return getfilectx
2606
2606
2607 getfilectx = lrugetfilectx()
2607 getfilectx = lrugetfilectx()
2608
2608
2609 if not changes:
2609 if not changes:
2610 changes = ctx1.status(ctx2, match=match)
2610 changes = ctx1.status(ctx2, match=match)
2611 if isinstance(changes, list):
2611 if isinstance(changes, list):
2612 modified, added, removed = changes[:3]
2612 modified, added, removed = changes[:3]
2613 else:
2613 else:
2614 modified, added, removed = (
2614 modified, added, removed = (
2615 changes.modified,
2615 changes.modified,
2616 changes.added,
2616 changes.added,
2617 changes.removed,
2617 changes.removed,
2618 )
2618 )
2619
2619
2620 if not modified and not added and not removed:
2620 if not modified and not added and not removed:
2621 return []
2621 return []
2622
2622
2623 if repo.ui.debugflag:
2623 if repo.ui.debugflag:
2624 hexfunc = hex
2624 hexfunc = hex
2625 else:
2625 else:
2626 hexfunc = short
2626 hexfunc = short
2627 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2627 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2628
2628
2629 if copy is None:
2629 if copy is None:
2630 copy = {}
2630 copy = {}
2631 if opts.git or opts.upgrade:
2631 if opts.git or opts.upgrade:
2632 copy = copies.pathcopies(ctx1, ctx2, match=match)
2632 copy = copies.pathcopies(ctx1, ctx2, match=match)
2633
2633
2634 if copysourcematch:
2634 if copysourcematch:
2635 # filter out copies where source side isn't inside the matcher
2635 # filter out copies where source side isn't inside the matcher
2636 # (copies.pathcopies() already filtered out the destination)
2636 # (copies.pathcopies() already filtered out the destination)
2637 copy = {
2637 copy = {
2638 dst: src
2638 dst: src
2639 for dst, src in pycompat.iteritems(copy)
2639 for dst, src in pycompat.iteritems(copy)
2640 if copysourcematch(src)
2640 if copysourcematch(src)
2641 }
2641 }
2642
2642
2643 modifiedset = set(modified)
2643 modifiedset = set(modified)
2644 addedset = set(added)
2644 addedset = set(added)
2645 removedset = set(removed)
2645 removedset = set(removed)
2646 for f in modified:
2646 for f in modified:
2647 if f not in ctx1:
2647 if f not in ctx1:
2648 # Fix up added, since merged-in additions appear as
2648 # Fix up added, since merged-in additions appear as
2649 # modifications during merges
2649 # modifications during merges
2650 modifiedset.remove(f)
2650 modifiedset.remove(f)
2651 addedset.add(f)
2651 addedset.add(f)
2652 for f in removed:
2652 for f in removed:
2653 if f not in ctx1:
2653 if f not in ctx1:
2654 # Merged-in additions that are then removed are reported as removed.
2654 # Merged-in additions that are then removed are reported as removed.
2655 # They are not in ctx1, so We don't want to show them in the diff.
2655 # They are not in ctx1, so We don't want to show them in the diff.
2656 removedset.remove(f)
2656 removedset.remove(f)
2657 modified = sorted(modifiedset)
2657 modified = sorted(modifiedset)
2658 added = sorted(addedset)
2658 added = sorted(addedset)
2659 removed = sorted(removedset)
2659 removed = sorted(removedset)
2660 for dst, src in list(copy.items()):
2660 for dst, src in list(copy.items()):
2661 if src not in ctx1:
2661 if src not in ctx1:
2662 # Files merged in during a merge and then copied/renamed are
2662 # Files merged in during a merge and then copied/renamed are
2663 # reported as copies. We want to show them in the diff as additions.
2663 # reported as copies. We want to show them in the diff as additions.
2664 del copy[dst]
2664 del copy[dst]
2665
2665
2666 prefetchmatch = scmutil.matchfiles(
2666 prefetchmatch = scmutil.matchfiles(
2667 repo, list(modifiedset | addedset | removedset)
2667 repo, list(modifiedset | addedset | removedset)
2668 )
2668 )
2669 scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)
2669 scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)
2670
2670
2671 def difffn(opts, losedata):
2671 def difffn(opts, losedata):
2672 return trydiff(
2672 return trydiff(
2673 repo,
2673 repo,
2674 revs,
2674 revs,
2675 ctx1,
2675 ctx1,
2676 ctx2,
2676 ctx2,
2677 modified,
2677 modified,
2678 added,
2678 added,
2679 removed,
2679 removed,
2680 copy,
2680 copy,
2681 getfilectx,
2681 getfilectx,
2682 opts,
2682 opts,
2683 losedata,
2683 losedata,
2684 pathfn,
2684 pathfn,
2685 )
2685 )
2686
2686
2687 if opts.upgrade and not opts.git:
2687 if opts.upgrade and not opts.git:
2688 try:
2688 try:
2689
2689
2690 def losedata(fn):
2690 def losedata(fn):
2691 if not losedatafn or not losedatafn(fn=fn):
2691 if not losedatafn or not losedatafn(fn=fn):
2692 raise GitDiffRequired
2692 raise GitDiffRequired
2693
2693
2694 # Buffer the whole output until we are sure it can be generated
2694 # Buffer the whole output until we are sure it can be generated
2695 return list(difffn(opts.copy(git=False), losedata))
2695 return list(difffn(opts.copy(git=False), losedata))
2696 except GitDiffRequired:
2696 except GitDiffRequired:
2697 return difffn(opts.copy(git=True), None)
2697 return difffn(opts.copy(git=True), None)
2698 else:
2698 else:
2699 return difffn(opts, None)
2699 return difffn(opts, None)
2700
2700
2701
2701
2702 def diffsinglehunk(hunklines):
2702 def diffsinglehunk(hunklines):
2703 """yield tokens for a list of lines in a single hunk"""
2703 """yield tokens for a list of lines in a single hunk"""
2704 for line in hunklines:
2704 for line in hunklines:
2705 # chomp
2705 # chomp
2706 chompline = line.rstrip(b'\r\n')
2706 chompline = line.rstrip(b'\r\n')
2707 # highlight tabs and trailing whitespace
2707 # highlight tabs and trailing whitespace
2708 stripline = chompline.rstrip()
2708 stripline = chompline.rstrip()
2709 if line.startswith(b'-'):
2709 if line.startswith(b'-'):
2710 label = b'diff.deleted'
2710 label = b'diff.deleted'
2711 elif line.startswith(b'+'):
2711 elif line.startswith(b'+'):
2712 label = b'diff.inserted'
2712 label = b'diff.inserted'
2713 else:
2713 else:
2714 raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
2714 raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
2715 for token in tabsplitter.findall(stripline):
2715 for token in tabsplitter.findall(stripline):
2716 if token.startswith(b'\t'):
2716 if token.startswith(b'\t'):
2717 yield (token, b'diff.tab')
2717 yield (token, b'diff.tab')
2718 else:
2718 else:
2719 yield (token, label)
2719 yield (token, label)
2720
2720
2721 if chompline != stripline:
2721 if chompline != stripline:
2722 yield (chompline[len(stripline) :], b'diff.trailingwhitespace')
2722 yield (chompline[len(stripline) :], b'diff.trailingwhitespace')
2723 if chompline != line:
2723 if chompline != line:
2724 yield (line[len(chompline) :], b'')
2724 yield (line[len(chompline) :], b'')
2725
2725
2726
2726
2727 def diffsinglehunkinline(hunklines):
2727 def diffsinglehunkinline(hunklines):
2728 """yield tokens for a list of lines in a single hunk, with inline colors"""
2728 """yield tokens for a list of lines in a single hunk, with inline colors"""
2729 # prepare deleted, and inserted content
2729 # prepare deleted, and inserted content
2730 a = b''
2730 a = b''
2731 b = b''
2731 b = b''
2732 for line in hunklines:
2732 for line in hunklines:
2733 if line[0:1] == b'-':
2733 if line[0:1] == b'-':
2734 a += line[1:]
2734 a += line[1:]
2735 elif line[0:1] == b'+':
2735 elif line[0:1] == b'+':
2736 b += line[1:]
2736 b += line[1:]
2737 else:
2737 else:
2738 raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
2738 raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
2739 # fast path: if either side is empty, use diffsinglehunk
2739 # fast path: if either side is empty, use diffsinglehunk
2740 if not a or not b:
2740 if not a or not b:
2741 for t in diffsinglehunk(hunklines):
2741 for t in diffsinglehunk(hunklines):
2742 yield t
2742 yield t
2743 return
2743 return
2744 # re-split the content into words
2744 # re-split the content into words
2745 al = wordsplitter.findall(a)
2745 al = wordsplitter.findall(a)
2746 bl = wordsplitter.findall(b)
2746 bl = wordsplitter.findall(b)
2747 # re-arrange the words to lines since the diff algorithm is line-based
2747 # re-arrange the words to lines since the diff algorithm is line-based
2748 aln = [s if s == b'\n' else s + b'\n' for s in al]
2748 aln = [s if s == b'\n' else s + b'\n' for s in al]
2749 bln = [s if s == b'\n' else s + b'\n' for s in bl]
2749 bln = [s if s == b'\n' else s + b'\n' for s in bl]
2750 an = b''.join(aln)
2750 an = b''.join(aln)
2751 bn = b''.join(bln)
2751 bn = b''.join(bln)
2752 # run the diff algorithm, prepare atokens and btokens
2752 # run the diff algorithm, prepare atokens and btokens
2753 atokens = []
2753 atokens = []
2754 btokens = []
2754 btokens = []
2755 blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
2755 blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
2756 for (a1, a2, b1, b2), btype in blocks:
2756 for (a1, a2, b1, b2), btype in blocks:
2757 changed = btype == b'!'
2757 changed = btype == b'!'
2758 for token in mdiff.splitnewlines(b''.join(al[a1:a2])):
2758 for token in mdiff.splitnewlines(b''.join(al[a1:a2])):
2759 atokens.append((changed, token))
2759 atokens.append((changed, token))
2760 for token in mdiff.splitnewlines(b''.join(bl[b1:b2])):
2760 for token in mdiff.splitnewlines(b''.join(bl[b1:b2])):
2761 btokens.append((changed, token))
2761 btokens.append((changed, token))
2762
2762
2763 # yield deleted tokens, then inserted ones
2763 # yield deleted tokens, then inserted ones
2764 for prefix, label, tokens in [
2764 for prefix, label, tokens in [
2765 (b'-', b'diff.deleted', atokens),
2765 (b'-', b'diff.deleted', atokens),
2766 (b'+', b'diff.inserted', btokens),
2766 (b'+', b'diff.inserted', btokens),
2767 ]:
2767 ]:
2768 nextisnewline = True
2768 nextisnewline = True
2769 for changed, token in tokens:
2769 for changed, token in tokens:
2770 if nextisnewline:
2770 if nextisnewline:
2771 yield (prefix, label)
2771 yield (prefix, label)
2772 nextisnewline = False
2772 nextisnewline = False
2773 # special handling line end
2773 # special handling line end
2774 isendofline = token.endswith(b'\n')
2774 isendofline = token.endswith(b'\n')
2775 if isendofline:
2775 if isendofline:
2776 chomp = token[:-1] # chomp
2776 chomp = token[:-1] # chomp
2777 if chomp.endswith(b'\r'):
2777 if chomp.endswith(b'\r'):
2778 chomp = chomp[:-1]
2778 chomp = chomp[:-1]
2779 endofline = token[len(chomp) :]
2779 endofline = token[len(chomp) :]
2780 token = chomp.rstrip() # detect spaces at the end
2780 token = chomp.rstrip() # detect spaces at the end
2781 endspaces = chomp[len(token) :]
2781 endspaces = chomp[len(token) :]
2782 # scan tabs
2782 # scan tabs
2783 for maybetab in tabsplitter.findall(token):
2783 for maybetab in tabsplitter.findall(token):
2784 if b'\t' == maybetab[0:1]:
2784 if b'\t' == maybetab[0:1]:
2785 currentlabel = b'diff.tab'
2785 currentlabel = b'diff.tab'
2786 else:
2786 else:
2787 if changed:
2787 if changed:
2788 currentlabel = label + b'.changed'
2788 currentlabel = label + b'.changed'
2789 else:
2789 else:
2790 currentlabel = label + b'.unchanged'
2790 currentlabel = label + b'.unchanged'
2791 yield (maybetab, currentlabel)
2791 yield (maybetab, currentlabel)
2792 if isendofline:
2792 if isendofline:
2793 if endspaces:
2793 if endspaces:
2794 yield (endspaces, b'diff.trailingwhitespace')
2794 yield (endspaces, b'diff.trailingwhitespace')
2795 yield (endofline, b'')
2795 yield (endofline, b'')
2796 nextisnewline = True
2796 nextisnewline = True
2797
2797
2798
2798
2799 def difflabel(func, *args, **kw):
2799 def difflabel(func, *args, **kw):
2800 '''yields 2-tuples of (output, label) based on the output of func()'''
2800 '''yields 2-tuples of (output, label) based on the output of func()'''
2801 if kw.get('opts') and kw['opts'].worddiff:
2801 if kw.get('opts') and kw['opts'].worddiff:
2802 dodiffhunk = diffsinglehunkinline
2802 dodiffhunk = diffsinglehunkinline
2803 else:
2803 else:
2804 dodiffhunk = diffsinglehunk
2804 dodiffhunk = diffsinglehunk
2805 headprefixes = [
2805 headprefixes = [
2806 (b'diff', b'diff.diffline'),
2806 (b'diff', b'diff.diffline'),
2807 (b'copy', b'diff.extended'),
2807 (b'copy', b'diff.extended'),
2808 (b'rename', b'diff.extended'),
2808 (b'rename', b'diff.extended'),
2809 (b'old', b'diff.extended'),
2809 (b'old', b'diff.extended'),
2810 (b'new', b'diff.extended'),
2810 (b'new', b'diff.extended'),
2811 (b'deleted', b'diff.extended'),
2811 (b'deleted', b'diff.extended'),
2812 (b'index', b'diff.extended'),
2812 (b'index', b'diff.extended'),
2813 (b'similarity', b'diff.extended'),
2813 (b'similarity', b'diff.extended'),
2814 (b'---', b'diff.file_a'),
2814 (b'---', b'diff.file_a'),
2815 (b'+++', b'diff.file_b'),
2815 (b'+++', b'diff.file_b'),
2816 ]
2816 ]
2817 textprefixes = [
2817 textprefixes = [
2818 (b'@', b'diff.hunk'),
2818 (b'@', b'diff.hunk'),
2819 # - and + are handled by diffsinglehunk
2819 # - and + are handled by diffsinglehunk
2820 ]
2820 ]
2821 head = False
2821 head = False
2822
2822
2823 # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
2823 # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
2824 hunkbuffer = []
2824 hunkbuffer = []
2825
2825
2826 def consumehunkbuffer():
2826 def consumehunkbuffer():
2827 if hunkbuffer:
2827 if hunkbuffer:
2828 for token in dodiffhunk(hunkbuffer):
2828 for token in dodiffhunk(hunkbuffer):
2829 yield token
2829 yield token
2830 hunkbuffer[:] = []
2830 hunkbuffer[:] = []
2831
2831
2832 for chunk in func(*args, **kw):
2832 for chunk in func(*args, **kw):
2833 lines = chunk.split(b'\n')
2833 lines = chunk.split(b'\n')
2834 linecount = len(lines)
2834 linecount = len(lines)
2835 for i, line in enumerate(lines):
2835 for i, line in enumerate(lines):
2836 if head:
2836 if head:
2837 if line.startswith(b'@'):
2837 if line.startswith(b'@'):
2838 head = False
2838 head = False
2839 else:
2839 else:
2840 if line and not line.startswith(
2840 if line and not line.startswith(
2841 (b' ', b'+', b'-', b'@', b'\\')
2841 (b' ', b'+', b'-', b'@', b'\\')
2842 ):
2842 ):
2843 head = True
2843 head = True
2844 diffline = False
2844 diffline = False
2845 if not head and line and line.startswith((b'+', b'-')):
2845 if not head and line and line.startswith((b'+', b'-')):
2846 diffline = True
2846 diffline = True
2847
2847
2848 prefixes = textprefixes
2848 prefixes = textprefixes
2849 if head:
2849 if head:
2850 prefixes = headprefixes
2850 prefixes = headprefixes
2851 if diffline:
2851 if diffline:
2852 # buffered
2852 # buffered
2853 bufferedline = line
2853 bufferedline = line
2854 if i + 1 < linecount:
2854 if i + 1 < linecount:
2855 bufferedline += b"\n"
2855 bufferedline += b"\n"
2856 hunkbuffer.append(bufferedline)
2856 hunkbuffer.append(bufferedline)
2857 else:
2857 else:
2858 # unbuffered
2858 # unbuffered
2859 for token in consumehunkbuffer():
2859 for token in consumehunkbuffer():
2860 yield token
2860 yield token
2861 stripline = line.rstrip()
2861 stripline = line.rstrip()
2862 for prefix, label in prefixes:
2862 for prefix, label in prefixes:
2863 if stripline.startswith(prefix):
2863 if stripline.startswith(prefix):
2864 yield (stripline, label)
2864 yield (stripline, label)
2865 if line != stripline:
2865 if line != stripline:
2866 yield (
2866 yield (
2867 line[len(stripline) :],
2867 line[len(stripline) :],
2868 b'diff.trailingwhitespace',
2868 b'diff.trailingwhitespace',
2869 )
2869 )
2870 break
2870 break
2871 else:
2871 else:
2872 yield (line, b'')
2872 yield (line, b'')
2873 if i + 1 < linecount:
2873 if i + 1 < linecount:
2874 yield (b'\n', b'')
2874 yield (b'\n', b'')
2875 for token in consumehunkbuffer():
2875 for token in consumehunkbuffer():
2876 yield token
2876 yield token
2877
2877
2878
2878
2879 def diffui(*args, **kw):
2879 def diffui(*args, **kw):
2880 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2880 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2881 return difflabel(diff, *args, **kw)
2881 return difflabel(diff, *args, **kw)
2882
2882
2883
2883
2884 def _filepairs(modified, added, removed, copy, opts):
2884 def _filepairs(modified, added, removed, copy, opts):
2885 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2885 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2886 before and f2 is the the name after. For added files, f1 will be None,
2886 before and f2 is the the name after. For added files, f1 will be None,
2887 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2887 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2888 or 'rename' (the latter two only if opts.git is set).'''
2888 or 'rename' (the latter two only if opts.git is set).'''
2889 gone = set()
2889 gone = set()
2890
2890
2891 copyto = {v: k for k, v in copy.items()}
2891 copyto = {v: k for k, v in copy.items()}
2892
2892
2893 addedset, removedset = set(added), set(removed)
2893 addedset, removedset = set(added), set(removed)
2894
2894
2895 for f in sorted(modified + added + removed):
2895 for f in sorted(modified + added + removed):
2896 copyop = None
2896 copyop = None
2897 f1, f2 = f, f
2897 f1, f2 = f, f
2898 if f in addedset:
2898 if f in addedset:
2899 f1 = None
2899 f1 = None
2900 if f in copy:
2900 if f in copy:
2901 if opts.git:
2901 if opts.git:
2902 f1 = copy[f]
2902 f1 = copy[f]
2903 if f1 in removedset and f1 not in gone:
2903 if f1 in removedset and f1 not in gone:
2904 copyop = b'rename'
2904 copyop = b'rename'
2905 gone.add(f1)
2905 gone.add(f1)
2906 else:
2906 else:
2907 copyop = b'copy'
2907 copyop = b'copy'
2908 elif f in removedset:
2908 elif f in removedset:
2909 f2 = None
2909 f2 = None
2910 if opts.git:
2910 if opts.git:
2911 # have we already reported a copy above?
2911 # have we already reported a copy above?
2912 if (
2912 if (
2913 f in copyto
2913 f in copyto
2914 and copyto[f] in addedset
2914 and copyto[f] in addedset
2915 and copy[copyto[f]] == f
2915 and copy[copyto[f]] == f
2916 ):
2916 ):
2917 continue
2917 continue
2918 yield f1, f2, copyop
2918 yield f1, f2, copyop
2919
2919
2920
2920
2921 def trydiff(
2921 def trydiff(
2922 repo,
2922 repo,
2923 revs,
2923 revs,
2924 ctx1,
2924 ctx1,
2925 ctx2,
2925 ctx2,
2926 modified,
2926 modified,
2927 added,
2927 added,
2928 removed,
2928 removed,
2929 copy,
2929 copy,
2930 getfilectx,
2930 getfilectx,
2931 opts,
2931 opts,
2932 losedatafn,
2932 losedatafn,
2933 pathfn,
2933 pathfn,
2934 ):
2934 ):
2935 '''given input data, generate a diff and yield it in blocks
2935 '''given input data, generate a diff and yield it in blocks
2936
2936
2937 If generating a diff would lose data like flags or binary data and
2937 If generating a diff would lose data like flags or binary data and
2938 losedatafn is not None, it will be called.
2938 losedatafn is not None, it will be called.
2939
2939
2940 pathfn is applied to every path in the diff output.
2940 pathfn is applied to every path in the diff output.
2941 '''
2941 '''
2942
2942
2943 def gitindex(text):
2943 def gitindex(text):
2944 if not text:
2944 if not text:
2945 text = b""
2945 text = b""
2946 l = len(text)
2946 l = len(text)
2947 s = hashutil.sha1(b'blob %d\0' % l)
2947 s = hashutil.sha1(b'blob %d\0' % l)
2948 s.update(text)
2948 s.update(text)
2949 return hex(s.digest())
2949 return hex(s.digest())
2950
2950
2951 if opts.noprefix:
2951 if opts.noprefix:
2952 aprefix = bprefix = b''
2952 aprefix = bprefix = b''
2953 else:
2953 else:
2954 aprefix = b'a/'
2954 aprefix = b'a/'
2955 bprefix = b'b/'
2955 bprefix = b'b/'
2956
2956
2957 def diffline(f, revs):
2957 def diffline(f, revs):
2958 revinfo = b' '.join([b"-r %s" % rev for rev in revs])
2958 revinfo = b' '.join([b"-r %s" % rev for rev in revs])
2959 return b'diff %s %s' % (revinfo, f)
2959 return b'diff %s %s' % (revinfo, f)
2960
2960
2961 def isempty(fctx):
2961 def isempty(fctx):
2962 return fctx is None or fctx.size() == 0
2962 return fctx is None or fctx.size() == 0
2963
2963
2964 date1 = dateutil.datestr(ctx1.date())
2964 date1 = dateutil.datestr(ctx1.date())
2965 date2 = dateutil.datestr(ctx2.date())
2965 date2 = dateutil.datestr(ctx2.date())
2966
2966
2967 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
2967 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
2968
2968
2969 if not pathfn:
2969 if not pathfn:
2970 pathfn = lambda f: f
2970 pathfn = lambda f: f
2971
2971
2972 for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
2972 for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
2973 content1 = None
2973 content1 = None
2974 content2 = None
2974 content2 = None
2975 fctx1 = None
2975 fctx1 = None
2976 fctx2 = None
2976 fctx2 = None
2977 flag1 = None
2977 flag1 = None
2978 flag2 = None
2978 flag2 = None
2979 if f1:
2979 if f1:
2980 fctx1 = getfilectx(f1, ctx1)
2980 fctx1 = getfilectx(f1, ctx1)
2981 if opts.git or losedatafn:
2981 if opts.git or losedatafn:
2982 flag1 = ctx1.flags(f1)
2982 flag1 = ctx1.flags(f1)
2983 if f2:
2983 if f2:
2984 fctx2 = getfilectx(f2, ctx2)
2984 fctx2 = getfilectx(f2, ctx2)
2985 if opts.git or losedatafn:
2985 if opts.git or losedatafn:
2986 flag2 = ctx2.flags(f2)
2986 flag2 = ctx2.flags(f2)
2987 # if binary is True, output "summary" or "base85", but not "text diff"
2987 # if binary is True, output "summary" or "base85", but not "text diff"
2988 if opts.text:
2988 if opts.text:
2989 binary = False
2989 binary = False
2990 else:
2990 else:
2991 binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)
2991 binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)
2992
2992
2993 if losedatafn and not opts.git:
2993 if losedatafn and not opts.git:
2994 if (
2994 if (
2995 binary
2995 binary
2996 or
2996 or
2997 # copy/rename
2997 # copy/rename
2998 f2 in copy
2998 f2 in copy
2999 or
2999 or
3000 # empty file creation
3000 # empty file creation
3001 (not f1 and isempty(fctx2))
3001 (not f1 and isempty(fctx2))
3002 or
3002 or
3003 # empty file deletion
3003 # empty file deletion
3004 (isempty(fctx1) and not f2)
3004 (isempty(fctx1) and not f2)
3005 or
3005 or
3006 # create with flags
3006 # create with flags
3007 (not f1 and flag2)
3007 (not f1 and flag2)
3008 or
3008 or
3009 # change flags
3009 # change flags
3010 (f1 and f2 and flag1 != flag2)
3010 (f1 and f2 and flag1 != flag2)
3011 ):
3011 ):
3012 losedatafn(f2 or f1)
3012 losedatafn(f2 or f1)
3013
3013
3014 path1 = pathfn(f1 or f2)
3014 path1 = pathfn(f1 or f2)
3015 path2 = pathfn(f2 or f1)
3015 path2 = pathfn(f2 or f1)
3016 header = []
3016 header = []
3017 if opts.git:
3017 if opts.git:
3018 header.append(
3018 header.append(
3019 b'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
3019 b'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
3020 )
3020 )
3021 if not f1: # added
3021 if not f1: # added
3022 header.append(b'new file mode %s' % gitmode[flag2])
3022 header.append(b'new file mode %s' % gitmode[flag2])
3023 elif not f2: # removed
3023 elif not f2: # removed
3024 header.append(b'deleted file mode %s' % gitmode[flag1])
3024 header.append(b'deleted file mode %s' % gitmode[flag1])
3025 else: # modified/copied/renamed
3025 else: # modified/copied/renamed
3026 mode1, mode2 = gitmode[flag1], gitmode[flag2]
3026 mode1, mode2 = gitmode[flag1], gitmode[flag2]
3027 if mode1 != mode2:
3027 if mode1 != mode2:
3028 header.append(b'old mode %s' % mode1)
3028 header.append(b'old mode %s' % mode1)
3029 header.append(b'new mode %s' % mode2)
3029 header.append(b'new mode %s' % mode2)
3030 if copyop is not None:
3030 if copyop is not None:
3031 if opts.showsimilarity:
3031 if opts.showsimilarity:
3032 sim = similar.score(ctx1[path1], ctx2[path2]) * 100
3032 sim = similar.score(ctx1[path1], ctx2[path2]) * 100
3033 header.append(b'similarity index %d%%' % sim)
3033 header.append(b'similarity index %d%%' % sim)
3034 header.append(b'%s from %s' % (copyop, path1))
3034 header.append(b'%s from %s' % (copyop, path1))
3035 header.append(b'%s to %s' % (copyop, path2))
3035 header.append(b'%s to %s' % (copyop, path2))
3036 elif revs:
3036 elif revs:
3037 header.append(diffline(path1, revs))
3037 header.append(diffline(path1, revs))
3038
3038
3039 # fctx.is | diffopts | what to | is fctx.data()
3039 # fctx.is | diffopts | what to | is fctx.data()
3040 # binary() | text nobinary git index | output? | outputted?
3040 # binary() | text nobinary git index | output? | outputted?
3041 # ------------------------------------|----------------------------
3041 # ------------------------------------|----------------------------
3042 # yes | no no no * | summary | no
3042 # yes | no no no * | summary | no
3043 # yes | no no yes * | base85 | yes
3043 # yes | no no yes * | base85 | yes
3044 # yes | no yes no * | summary | no
3044 # yes | no yes no * | summary | no
3045 # yes | no yes yes 0 | summary | no
3045 # yes | no yes yes 0 | summary | no
3046 # yes | no yes yes >0 | summary | semi [1]
3046 # yes | no yes yes >0 | summary | semi [1]
3047 # yes | yes * * * | text diff | yes
3047 # yes | yes * * * | text diff | yes
3048 # no | * * * * | text diff | yes
3048 # no | * * * * | text diff | yes
3049 # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
3049 # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
3050 if binary and (
3050 if binary and (
3051 not opts.git or (opts.git and opts.nobinary and not opts.index)
3051 not opts.git or (opts.git and opts.nobinary and not opts.index)
3052 ):
3052 ):
3053 # fast path: no binary content will be displayed, content1 and
3053 # fast path: no binary content will be displayed, content1 and
3054 # content2 are only used for equivalent test. cmp() could have a
3054 # content2 are only used for equivalent test. cmp() could have a
3055 # fast path.
3055 # fast path.
3056 if fctx1 is not None:
3056 if fctx1 is not None:
3057 content1 = b'\0'
3057 content1 = b'\0'
3058 if fctx2 is not None:
3058 if fctx2 is not None:
3059 if fctx1 is not None and not fctx1.cmp(fctx2):
3059 if fctx1 is not None and not fctx1.cmp(fctx2):
3060 content2 = b'\0' # not different
3060 content2 = b'\0' # not different
3061 else:
3061 else:
3062 content2 = b'\0\0'
3062 content2 = b'\0\0'
3063 else:
3063 else:
3064 # normal path: load contents
3064 # normal path: load contents
3065 if fctx1 is not None:
3065 if fctx1 is not None:
3066 content1 = fctx1.data()
3066 content1 = fctx1.data()
3067 if fctx2 is not None:
3067 if fctx2 is not None:
3068 content2 = fctx2.data()
3068 content2 = fctx2.data()
3069
3069
3070 if binary and opts.git and not opts.nobinary:
3070 if binary and opts.git and not opts.nobinary:
3071 text = mdiff.b85diff(content1, content2)
3071 text = mdiff.b85diff(content1, content2)
3072 if text:
3072 if text:
3073 header.append(
3073 header.append(
3074 b'index %s..%s' % (gitindex(content1), gitindex(content2))
3074 b'index %s..%s' % (gitindex(content1), gitindex(content2))
3075 )
3075 )
3076 hunks = ((None, [text]),)
3076 hunks = ((None, [text]),)
3077 else:
3077 else:
3078 if opts.git and opts.index > 0:
3078 if opts.git and opts.index > 0:
3079 flag = flag1
3079 flag = flag1
3080 if flag is None:
3080 if flag is None:
3081 flag = flag2
3081 flag = flag2
3082 header.append(
3082 header.append(
3083 b'index %s..%s %s'
3083 b'index %s..%s %s'
3084 % (
3084 % (
3085 gitindex(content1)[0 : opts.index],
3085 gitindex(content1)[0 : opts.index],
3086 gitindex(content2)[0 : opts.index],
3086 gitindex(content2)[0 : opts.index],
3087 gitmode[flag],
3087 gitmode[flag],
3088 )
3088 )
3089 )
3089 )
3090
3090
3091 uheaders, hunks = mdiff.unidiff(
3091 uheaders, hunks = mdiff.unidiff(
3092 content1,
3092 content1,
3093 date1,
3093 date1,
3094 content2,
3094 content2,
3095 date2,
3095 date2,
3096 path1,
3096 path1,
3097 path2,
3097 path2,
3098 binary=binary,
3098 binary=binary,
3099 opts=opts,
3099 opts=opts,
3100 )
3100 )
3101 header.extend(uheaders)
3101 header.extend(uheaders)
3102 yield fctx1, fctx2, header, hunks
3102 yield fctx1, fctx2, header, hunks
3103
3103
3104
3104
3105 def diffstatsum(stats):
3105 def diffstatsum(stats):
3106 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
3106 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
3107 for f, a, r, b in stats:
3107 for f, a, r, b in stats:
3108 maxfile = max(maxfile, encoding.colwidth(f))
3108 maxfile = max(maxfile, encoding.colwidth(f))
3109 maxtotal = max(maxtotal, a + r)
3109 maxtotal = max(maxtotal, a + r)
3110 addtotal += a
3110 addtotal += a
3111 removetotal += r
3111 removetotal += r
3112 binary = binary or b
3112 binary = binary or b
3113
3113
3114 return maxfile, maxtotal, addtotal, removetotal, binary
3114 return maxfile, maxtotal, addtotal, removetotal, binary
3115
3115
3116
3116
3117 def diffstatdata(lines):
3117 def diffstatdata(lines):
3118 diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')
3118 diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')
3119
3119
3120 results = []
3120 results = []
3121 filename, adds, removes, isbinary = None, 0, 0, False
3121 filename, adds, removes, isbinary = None, 0, 0, False
3122
3122
3123 def addresult():
3123 def addresult():
3124 if filename:
3124 if filename:
3125 results.append((filename, adds, removes, isbinary))
3125 results.append((filename, adds, removes, isbinary))
3126
3126
3127 # inheader is used to track if a line is in the
3127 # inheader is used to track if a line is in the
3128 # header portion of the diff. This helps properly account
3128 # header portion of the diff. This helps properly account
3129 # for lines that start with '--' or '++'
3129 # for lines that start with '--' or '++'
3130 inheader = False
3130 inheader = False
3131
3131
3132 for line in lines:
3132 for line in lines:
3133 if line.startswith(b'diff'):
3133 if line.startswith(b'diff'):
3134 addresult()
3134 addresult()
3135 # starting a new file diff
3135 # starting a new file diff
3136 # set numbers to 0 and reset inheader
3136 # set numbers to 0 and reset inheader
3137 inheader = True
3137 inheader = True
3138 adds, removes, isbinary = 0, 0, False
3138 adds, removes, isbinary = 0, 0, False
3139 if line.startswith(b'diff --git a/'):
3139 if line.startswith(b'diff --git a/'):
3140 filename = gitre.search(line).group(2)
3140 filename = gitre.search(line).group(2)
3141 elif line.startswith(b'diff -r'):
3141 elif line.startswith(b'diff -r'):
3142 # format: "diff -r ... -r ... filename"
3142 # format: "diff -r ... -r ... filename"
3143 filename = diffre.search(line).group(1)
3143 filename = diffre.search(line).group(1)
3144 elif line.startswith(b'@@'):
3144 elif line.startswith(b'@@'):
3145 inheader = False
3145 inheader = False
3146 elif line.startswith(b'+') and not inheader:
3146 elif line.startswith(b'+') and not inheader:
3147 adds += 1
3147 adds += 1
3148 elif line.startswith(b'-') and not inheader:
3148 elif line.startswith(b'-') and not inheader:
3149 removes += 1
3149 removes += 1
3150 elif line.startswith(b'GIT binary patch') or line.startswith(
3150 elif line.startswith(b'GIT binary patch') or line.startswith(
3151 b'Binary file'
3151 b'Binary file'
3152 ):
3152 ):
3153 isbinary = True
3153 isbinary = True
3154 elif line.startswith(b'rename from'):
3154 elif line.startswith(b'rename from'):
3155 filename = line[12:]
3155 filename = line[12:]
3156 elif line.startswith(b'rename to'):
3156 elif line.startswith(b'rename to'):
3157 filename += b' => %s' % line[10:]
3157 filename += b' => %s' % line[10:]
3158 addresult()
3158 addresult()
3159 return results
3159 return results
3160
3160
3161
3161
3162 def diffstat(lines, width=80):
3162 def diffstat(lines, width=80):
3163 output = []
3163 output = []
3164 stats = diffstatdata(lines)
3164 stats = diffstatdata(lines)
3165 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
3165 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
3166
3166
3167 countwidth = len(str(maxtotal))
3167 countwidth = len(str(maxtotal))
3168 if hasbinary and countwidth < 3:
3168 if hasbinary and countwidth < 3:
3169 countwidth = 3
3169 countwidth = 3
3170 graphwidth = width - countwidth - maxname - 6
3170 graphwidth = width - countwidth - maxname - 6
3171 if graphwidth < 10:
3171 if graphwidth < 10:
3172 graphwidth = 10
3172 graphwidth = 10
3173
3173
3174 def scale(i):
3174 def scale(i):
3175 if maxtotal <= graphwidth:
3175 if maxtotal <= graphwidth:
3176 return i
3176 return i
3177 # If diffstat runs out of room it doesn't print anything,
3177 # If diffstat runs out of room it doesn't print anything,
3178 # which isn't very useful, so always print at least one + or -
3178 # which isn't very useful, so always print at least one + or -
3179 # if there were at least some changes.
3179 # if there were at least some changes.
3180 return max(i * graphwidth // maxtotal, int(bool(i)))
3180 return max(i * graphwidth // maxtotal, int(bool(i)))
3181
3181
3182 for filename, adds, removes, isbinary in stats:
3182 for filename, adds, removes, isbinary in stats:
3183 if isbinary:
3183 if isbinary:
3184 count = b'Bin'
3184 count = b'Bin'
3185 else:
3185 else:
3186 count = b'%d' % (adds + removes)
3186 count = b'%d' % (adds + removes)
3187 pluses = b'+' * scale(adds)
3187 pluses = b'+' * scale(adds)
3188 minuses = b'-' * scale(removes)
3188 minuses = b'-' * scale(removes)
3189 output.append(
3189 output.append(
3190 b' %s%s | %*s %s%s\n'
3190 b' %s%s | %*s %s%s\n'
3191 % (
3191 % (
3192 filename,
3192 filename,
3193 b' ' * (maxname - encoding.colwidth(filename)),
3193 b' ' * (maxname - encoding.colwidth(filename)),
3194 countwidth,
3194 countwidth,
3195 count,
3195 count,
3196 pluses,
3196 pluses,
3197 minuses,
3197 minuses,
3198 )
3198 )
3199 )
3199 )
3200
3200
3201 if stats:
3201 if stats:
3202 output.append(
3202 output.append(
3203 _(b' %d files changed, %d insertions(+), %d deletions(-)\n')
3203 _(b' %d files changed, %d insertions(+), %d deletions(-)\n')
3204 % (len(stats), totaladds, totalremoves)
3204 % (len(stats), totaladds, totalremoves)
3205 )
3205 )
3206
3206
3207 return b''.join(output)
3207 return b''.join(output)
3208
3208
3209
3209
3210 def diffstatui(*args, **kw):
3210 def diffstatui(*args, **kw):
3211 '''like diffstat(), but yields 2-tuples of (output, label) for
3211 '''like diffstat(), but yields 2-tuples of (output, label) for
3212 ui.write()
3212 ui.write()
3213 '''
3213 '''
3214
3214
3215 for line in diffstat(*args, **kw).splitlines():
3215 for line in diffstat(*args, **kw).splitlines():
3216 if line and line[-1] in b'+-':
3216 if line and line[-1] in b'+-':
3217 name, graph = line.rsplit(b' ', 1)
3217 name, graph = line.rsplit(b' ', 1)
3218 yield (name + b' ', b'')
3218 yield (name + b' ', b'')
3219 m = re.search(br'\++', graph)
3219 m = re.search(br'\++', graph)
3220 if m:
3220 if m:
3221 yield (m.group(0), b'diffstat.inserted')
3221 yield (m.group(0), b'diffstat.inserted')
3222 m = re.search(br'-+', graph)
3222 m = re.search(br'-+', graph)
3223 if m:
3223 if m:
3224 yield (m.group(0), b'diffstat.deleted')
3224 yield (m.group(0), b'diffstat.deleted')
3225 else:
3225 else:
3226 yield (line, b'')
3226 yield (line, b'')
3227 yield (b'\n', b'')
3227 yield (b'\n', b'')
@@ -1,59 +1,104 b''
1 ROOT = CWD + "/../.."
1 ROOT = CWD + "/../.."
2
2
3 def make_exe():
3 # Code to run in Python interpreter.
4 dist = default_python_distribution()
4 RUN_CODE = "import hgdemandimport; hgdemandimport.enable(); from mercurial import dispatch; dispatch.run()"
5
6
7 set_build_path(ROOT + "/build/pyoxidizer")
8
5
9
6 code = "import hgdemandimport; hgdemandimport.enable(); from mercurial import dispatch; dispatch.run()"
10 def make_distribution():
11 return default_python_distribution()
12
7
13
14 def make_distribution_windows():
15 return default_python_distribution(flavor="standalone_dynamic")
16
17
18 def make_exe(dist):
8 config = PythonInterpreterConfig(
19 config = PythonInterpreterConfig(
9 raw_allocator = "system",
20 raw_allocator = "system",
10 run_eval = code,
21 run_eval = RUN_CODE,
11 # We want to let the user load extensions from the file system
22 # We want to let the user load extensions from the file system
12 filesystem_importer = True,
23 filesystem_importer = True,
13 # We need this to make resourceutil happy, since it looks for sys.frozen.
24 # We need this to make resourceutil happy, since it looks for sys.frozen.
14 sys_frozen = True,
25 sys_frozen = True,
15 legacy_windows_stdio = True,
26 legacy_windows_stdio = True,
16 )
27 )
17
28
18 exe = dist.to_python_executable(
29 exe = dist.to_python_executable(
19 name = "hg",
30 name = "hg",
20 resources_policy = "prefer-in-memory-fallback-filesystem-relative:lib",
31 resources_policy = "prefer-in-memory-fallback-filesystem-relative:lib",
21 config = config,
32 config = config,
22 # Extension may depend on any Python functionality. Include all
33 # Extension may depend on any Python functionality. Include all
23 # extensions.
34 # extensions.
24 extension_module_filter = "all",
35 extension_module_filter = "all",
25 )
36 )
26
37
27 exe.add_python_resources(dist.pip_install([ROOT]))
38 # Add Mercurial to resources.
39 for resource in dist.pip_install(["--verbose", ROOT]):
40 # This is a bit wonky and worth explaining.
41 #
42 # Various parts of Mercurial don't yet support loading package
43 # resources via the ResourceReader interface. Or, not having
44 # file-based resources would be too inconvenient for users.
45 #
46 # So, for package resources, we package them both in the
47 # filesystem as well as in memory. If both are defined,
48 # PyOxidizer will prefer the in-memory location. So even
49 # if the filesystem file isn't packaged in the location
50 # specified here, we should never encounter an errors as the
51 # resource will always be available in memory.
52 if type(resource) == "PythonPackageResource":
53 exe.add_filesystem_relative_python_resource(".", resource)
54 exe.add_in_memory_python_resource(resource)
55 else:
56 exe.add_python_resource(resource)
57
58 # On Windows, we install extra packages for convenience.
59 if "windows" in BUILD_TARGET_TRIPLE:
60 exe.add_python_resources(
61 dist.pip_install(["-r", ROOT + "/contrib/packaging/requirements_win32.txt"])
62 )
28
63
29 return exe
64 return exe
30
65
31 def make_install(exe):
66
67 def make_manifest(dist, exe):
32 m = FileManifest()
68 m = FileManifest()
33
34 # `hg` goes in root directory.
35 m.add_python_resource(".", exe)
69 m.add_python_resource(".", exe)
36
70
37 templates = glob(
71 return m
38 include = [ROOT + "/mercurial/templates/**/*"],
39 strip_prefix = ROOT + "/mercurial/",
40 )
41 m.add_manifest(templates)
42
72
43 return m
44
73
45 def make_embedded_resources(exe):
74 def make_embedded_resources(exe):
46 return exe.to_embedded_resources()
75 return exe.to_embedded_resources()
47
76
48 register_target("exe", make_exe)
77
49 register_target("app", make_install, depends = ["exe"], default = True)
78 register_target("distribution_posix", make_distribution)
50 register_target("embedded", make_embedded_resources, depends = ["exe"], default_build_script = True)
79 register_target("distribution_windows", make_distribution_windows)
80
81 register_target("exe_posix", make_exe, depends = ["distribution_posix"])
82 register_target("exe_windows", make_exe, depends = ["distribution_windows"])
83
84 register_target(
85 "app_posix",
86 make_manifest,
87 depends = ["distribution_posix", "exe_posix"],
88 default = "windows" not in BUILD_TARGET_TRIPLE,
89 )
90 register_target(
91 "app_windows",
92 make_manifest,
93 depends = ["distribution_windows", "exe_windows"],
94 default = "windows" in BUILD_TARGET_TRIPLE,
95 )
96
51 resolve_targets()
97 resolve_targets()
52
98
53 # END OF COMMON USER-ADJUSTED SETTINGS.
99 # END OF COMMON USER-ADJUSTED SETTINGS.
54 #
100 #
55 # Everything below this is typically managed by PyOxidizer and doesn't need
101 # Everything below this is typically managed by PyOxidizer and doesn't need
56 # to be updated by people.
102 # to be updated by people.
57
103
58 PYOXIDIZER_VERSION = "0.7.0-pre"
104 PYOXIDIZER_VERSION = "0.7.0"
59 PYOXIDIZER_COMMIT = "c772a1379c3026314eda1c8ea244b86c0658951d"
@@ -1,93 +1,94 b''
1 #require test-repo
1 #require test-repo
2
2
3 $ . "$TESTDIR/helpers-testrepo.sh"
3 $ . "$TESTDIR/helpers-testrepo.sh"
4 $ check_code="$TESTDIR"/../contrib/check-code.py
4 $ check_code="$TESTDIR"/../contrib/check-code.py
5 $ cd "$TESTDIR"/..
5 $ cd "$TESTDIR"/..
6
6
7 New errors are not allowed. Warnings are strongly discouraged.
7 New errors are not allowed. Warnings are strongly discouraged.
8 (The writing "no-che?k-code" is for not skipping this file when checking.)
8 (The writing "no-che?k-code" is for not skipping this file when checking.)
9
9
10 $ testrepohg locate \
10 $ testrepohg locate \
11 > -X contrib/python-zstandard \
11 > -X contrib/python-zstandard \
12 > -X hgext/fsmonitor/pywatchman \
12 > -X hgext/fsmonitor/pywatchman \
13 > -X mercurial/thirdparty \
13 > -X mercurial/thirdparty \
14 > | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false
14 > | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false
15 Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob)
15 Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob)
16 Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob)
16 Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob)
17 Skipping contrib/automation/hgautomation/cli.py it has no-che?k-code (glob)
17 Skipping contrib/automation/hgautomation/cli.py it has no-che?k-code (glob)
18 Skipping contrib/automation/hgautomation/linux.py it has no-che?k-code (glob)
18 Skipping contrib/automation/hgautomation/linux.py it has no-che?k-code (glob)
19 Skipping contrib/automation/hgautomation/pypi.py it has no-che?k-code (glob)
19 Skipping contrib/automation/hgautomation/pypi.py it has no-che?k-code (glob)
20 Skipping contrib/automation/hgautomation/ssh.py it has no-che?k-code (glob)
20 Skipping contrib/automation/hgautomation/ssh.py it has no-che?k-code (glob)
21 Skipping contrib/automation/hgautomation/try_server.py it has no-che?k-code (glob)
21 Skipping contrib/automation/hgautomation/try_server.py it has no-che?k-code (glob)
22 Skipping contrib/automation/hgautomation/windows.py it has no-che?k-code (glob)
22 Skipping contrib/automation/hgautomation/windows.py it has no-che?k-code (glob)
23 Skipping contrib/automation/hgautomation/winrm.py it has no-che?k-code (glob)
23 Skipping contrib/automation/hgautomation/winrm.py it has no-che?k-code (glob)
24 Skipping contrib/fuzz/FuzzedDataProvider.h it has no-che?k-code (glob)
24 Skipping contrib/fuzz/FuzzedDataProvider.h it has no-che?k-code (glob)
25 Skipping contrib/fuzz/standalone_fuzz_target_runner.cc it has no-che?k-code (glob)
25 Skipping contrib/fuzz/standalone_fuzz_target_runner.cc it has no-che?k-code (glob)
26 Skipping contrib/packaging/hgpackaging/cli.py it has no-che?k-code (glob)
26 Skipping contrib/packaging/hgpackaging/cli.py it has no-che?k-code (glob)
27 Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
27 Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
28 Skipping contrib/packaging/hgpackaging/inno.py it has no-che?k-code (glob)
28 Skipping contrib/packaging/hgpackaging/inno.py it has no-che?k-code (glob)
29 Skipping contrib/packaging/hgpackaging/py2exe.py it has no-che?k-code (glob)
29 Skipping contrib/packaging/hgpackaging/py2exe.py it has no-che?k-code (glob)
30 Skipping contrib/packaging/hgpackaging/pyoxidizer.py it has no-che?k-code (glob)
30 Skipping contrib/packaging/hgpackaging/util.py it has no-che?k-code (glob)
31 Skipping contrib/packaging/hgpackaging/util.py it has no-che?k-code (glob)
31 Skipping contrib/packaging/hgpackaging/wix.py it has no-che?k-code (glob)
32 Skipping contrib/packaging/hgpackaging/wix.py it has no-che?k-code (glob)
32 Skipping i18n/polib.py it has no-che?k-code (glob)
33 Skipping i18n/polib.py it has no-che?k-code (glob)
33 Skipping mercurial/statprof.py it has no-che?k-code (glob)
34 Skipping mercurial/statprof.py it has no-che?k-code (glob)
34 Skipping tests/badserverext.py it has no-che?k-code (glob)
35 Skipping tests/badserverext.py it has no-che?k-code (glob)
35
36
36 @commands in debugcommands.py should be in alphabetical order.
37 @commands in debugcommands.py should be in alphabetical order.
37
38
38 >>> import re
39 >>> import re
39 >>> commands = []
40 >>> commands = []
40 >>> with open('mercurial/debugcommands.py', 'rb') as fh:
41 >>> with open('mercurial/debugcommands.py', 'rb') as fh:
41 ... for line in fh:
42 ... for line in fh:
42 ... m = re.match(br"^@command\('([a-z]+)", line)
43 ... m = re.match(br"^@command\('([a-z]+)", line)
43 ... if m:
44 ... if m:
44 ... commands.append(m.group(1))
45 ... commands.append(m.group(1))
45 >>> scommands = list(sorted(commands))
46 >>> scommands = list(sorted(commands))
46 >>> for i, command in enumerate(scommands):
47 >>> for i, command in enumerate(scommands):
47 ... if command != commands[i]:
48 ... if command != commands[i]:
48 ... print('commands in debugcommands.py not sorted; first differing '
49 ... print('commands in debugcommands.py not sorted; first differing '
49 ... 'command is %s; expected %s' % (commands[i], command))
50 ... 'command is %s; expected %s' % (commands[i], command))
50 ... break
51 ... break
51
52
52 Prevent adding new files in the root directory accidentally.
53 Prevent adding new files in the root directory accidentally.
53
54
54 $ testrepohg files 'glob:*'
55 $ testrepohg files 'glob:*'
55 .arcconfig
56 .arcconfig
56 .clang-format
57 .clang-format
57 .editorconfig
58 .editorconfig
58 .hgignore
59 .hgignore
59 .hgsigs
60 .hgsigs
60 .hgtags
61 .hgtags
61 .jshintrc
62 .jshintrc
62 CONTRIBUTING
63 CONTRIBUTING
63 CONTRIBUTORS
64 CONTRIBUTORS
64 COPYING
65 COPYING
65 Makefile
66 Makefile
66 README.rst
67 README.rst
67 black.toml
68 black.toml
68 hg
69 hg
69 hgeditor
70 hgeditor
70 hgweb.cgi
71 hgweb.cgi
71 setup.py
72 setup.py
72
73
73 Prevent adding modules which could be shadowed by ancient .so/.dylib.
74 Prevent adding modules which could be shadowed by ancient .so/.dylib.
74
75
75 $ testrepohg files \
76 $ testrepohg files \
76 > mercurial/base85.py \
77 > mercurial/base85.py \
77 > mercurial/bdiff.py \
78 > mercurial/bdiff.py \
78 > mercurial/diffhelpers.py \
79 > mercurial/diffhelpers.py \
79 > mercurial/mpatch.py \
80 > mercurial/mpatch.py \
80 > mercurial/osutil.py \
81 > mercurial/osutil.py \
81 > mercurial/parsers.py \
82 > mercurial/parsers.py \
82 > mercurial/zstd.py
83 > mercurial/zstd.py
83 [1]
84 [1]
84
85
85 Keep python3 tests sorted:
86 Keep python3 tests sorted:
86 $ sort < contrib/python3-whitelist > $TESTTMP/py3sorted
87 $ sort < contrib/python3-whitelist > $TESTTMP/py3sorted
87 $ cmp contrib/python3-whitelist $TESTTMP/py3sorted || echo 'Please sort passing tests!'
88 $ cmp contrib/python3-whitelist $TESTTMP/py3sorted || echo 'Please sort passing tests!'
88
89
89 Keep Windows line endings in check
90 Keep Windows line endings in check
90
91
91 $ hg files 'set:eol(dos)'
92 $ hg files 'set:eol(dos)'
92 contrib/win32/hg.bat
93 contrib/win32/hg.bat
93 contrib/win32/mercurial.ini
94 contrib/win32/mercurial.ini
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now