branching: merge default into stable for 6.1 freeze
Raphaël Gomès
r50035:c00d3ce4 merge 6.1rc0 stable

[Diff truncated by the viewer: this changeset also adds a number of new files (mode 100644) whose contents are not shown.]
@@ -1,576 +1,576 @@
# cli.py - Command line interface for automation
#
# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# no-check-code because Python 3 native.

import argparse
import concurrent.futures as futures
import os
import pathlib
import time

from . import (
    aws,
    HGAutomation,
    linux,
    try_server,
    windows,
)


SOURCE_ROOT = pathlib.Path(
    os.path.abspath(__file__)
).parent.parent.parent.parent
DIST_PATH = SOURCE_ROOT / 'dist'


def bootstrap_linux_dev(
    hga: HGAutomation, aws_region, distros=None, parallel=False
):
    c = hga.aws_connection(aws_region)

    if distros:
        distros = distros.split(',')
    else:
        distros = sorted(linux.DISTROS)

    # TODO There is a wonky interaction involving KeyboardInterrupt whereby
    # the context manager that is supposed to terminate the temporary EC2
    # instance doesn't run. Until we fix this, make parallel building opt-in
    # so we don't orphan instances.
    if parallel:
        fs = []

        with futures.ThreadPoolExecutor(len(distros)) as e:
            for distro in distros:
                fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))

            for f in fs:
                f.result()
    else:
        for distro in distros:
            aws.ensure_linux_dev_ami(c, distro=distro)
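The TODO above is worth dwelling on: if a KeyboardInterrupt arrives while the futures are being waited on, the exception can propagate past the cleanup context manager before the worker threads finish, leaving EC2 instances running. Below is a minimal sketch of one common mitigation, cancelling pending futures before re-raising; it is an illustration only, not the project's code, and `items`/`work` are hypothetical stand-ins for the distros and aws.ensure_linux_dev_ami above.

# --- illustrative sketch, not part of cli.py ---
import concurrent.futures as futures

def run_parallel(items, work):
    with futures.ThreadPoolExecutor(len(items)) as e:
        fs = [e.submit(work, item) for item in items]
        try:
            for f in fs:
                f.result()
        except KeyboardInterrupt:
            for f in fs:
                # Pending tasks never start; already-running ones finish,
                # so the executor's shutdown (and any enclosing cleanup
                # context managers) can still run.
                f.cancel()
            raise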

def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
    print('Windows development AMI available as %s' % image.id)


def build_inno(
    hga: HGAutomation,
    aws_region,
    python_version,
    arch,
    revision,
    version,
    base_image_name,
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
    DIST_PATH.mkdir(exist_ok=True)

    with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
        instance = insts[0]

        windows.synchronize_hg(SOURCE_ROOT, revision, instance)

        for py_version in python_version:
            for a in arch:
                windows.build_inno_installer(
                    instance.winrm_client,
                    py_version,
                    a,
                    DIST_PATH,
                    version=version,
                )


def build_wix(
    hga: HGAutomation,
    aws_region,
    python_version,
    arch,
    revision,
    version,
    base_image_name,
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
    DIST_PATH.mkdir(exist_ok=True)

    with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
        instance = insts[0]

        windows.synchronize_hg(SOURCE_ROOT, revision, instance)

        for py_version in python_version:
            for a in arch:
                windows.build_wix_installer(
                    instance.winrm_client,
                    py_version,
                    a,
                    DIST_PATH,
                    version=version,
                )


def build_windows_wheel(
    hga: HGAutomation,
    aws_region,
    python_version,
    arch,
    revision,
    base_image_name,
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
    DIST_PATH.mkdir(exist_ok=True)

    with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
        instance = insts[0]

        windows.synchronize_hg(SOURCE_ROOT, revision, instance)

        for py_version in python_version:
            for a in arch:
                windows.build_wheel(
                    instance.winrm_client, py_version, a, DIST_PATH
                )


def build_all_windows_packages(
    hga: HGAutomation, aws_region, revision, version, base_image_name
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
    DIST_PATH.mkdir(exist_ok=True)

    with aws.temporary_windows_dev_instances(c, image, 'm6i.large') as insts:
        instance = insts[0]

        winrm_client = instance.winrm_client

        windows.synchronize_hg(SOURCE_ROOT, revision, instance)

-        for py_version in ("2.7", "3.7", "3.8", "3.9"):
+        for py_version in ("2.7", "3.7", "3.8", "3.9", "3.10"):
            for arch in ("x86", "x64"):
                windows.purge_hg(winrm_client)
                windows.build_wheel(
                    winrm_client,
                    python_version=py_version,
                    arch=arch,
                    dest_path=DIST_PATH,
                )

        for py_version in (2, 3):
            for arch in ('x86', 'x64'):
                windows.purge_hg(winrm_client)
                windows.build_inno_installer(
                    winrm_client, py_version, arch, DIST_PATH, version=version
                )
                windows.build_wix_installer(
                    winrm_client, py_version, arch, DIST_PATH, version=version
                )
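build_all_windows_packages walks a fixed build matrix: every wheel-capable Python crossed with both architectures, then both installer flavors for each Python major version. The nested loops above are equivalent to iterating a cartesian product; a small sketch of the wheel half, illustration only and not the project's code:

# --- illustrative sketch, not part of cli.py ---
import itertools

for py_version, arch in itertools.product(
    ("2.7", "3.7", "3.8", "3.9", "3.10"), ("x86", "x64")
):
    print('would build wheel for Python %s on %s' % (py_version, arch))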

def terminate_ec2_instances(hga: HGAutomation, aws_region):
    c = hga.aws_connection(aws_region, ensure_ec2_state=False)
    aws.terminate_ec2_instances(c.ec2resource)


def purge_ec2_resources(hga: HGAutomation, aws_region):
    c = hga.aws_connection(aws_region, ensure_ec2_state=False)
    aws.remove_resources(c)


def run_tests_linux(
    hga: HGAutomation,
    aws_region,
    instance_type,
    python_version,
    test_flags,
    distro,
    filesystem,
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_linux_dev_ami(c, distro=distro)

    t_start = time.time()

    ensure_extra_volume = filesystem not in ('default', 'tmpfs')

    with aws.temporary_linux_dev_instances(
        c, image, instance_type, ensure_extra_volume=ensure_extra_volume
    ) as insts:

        instance = insts[0]

        linux.prepare_exec_environment(
            instance.ssh_client, filesystem=filesystem
        )
        linux.synchronize_hg(SOURCE_ROOT, instance, '.')
        t_prepared = time.time()
        linux.run_tests(instance.ssh_client, python_version, test_flags)
        t_done = time.time()

        t_setup = t_prepared - t_start
        t_all = t_done - t_start

        print(
            'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
            % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
        )


def run_tests_windows(
    hga: HGAutomation,
    aws_region,
    instance_type,
    python_version,
    arch,
    test_flags,
    base_image_name,
):
    c = hga.aws_connection(aws_region)
    image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)

    with aws.temporary_windows_dev_instances(
        c, image, instance_type, disable_antivirus=True
    ) as insts:
        instance = insts[0]

        windows.synchronize_hg(SOURCE_ROOT, '.', instance)
        windows.run_tests(
            instance.winrm_client, python_version, arch, test_flags
        )


def publish_windows_artifacts(
    hg: HGAutomation,
    aws_region,
    version: str,
    pypi: bool,
    mercurial_scm_org: bool,
    ssh_username: str,
):
    windows.publish_artifacts(
        DIST_PATH,
        version,
        pypi=pypi,
        mercurial_scm_org=mercurial_scm_org,
        ssh_username=ssh_username,
    )


def run_try(hga: HGAutomation, aws_region: str, rev: str):
    c = hga.aws_connection(aws_region, ensure_ec2_state=False)
    try_server.trigger_try(c, rev=rev)


def get_parser():
    parser = argparse.ArgumentParser()

    parser.add_argument(
        '--state-path',
        default='~/.hgautomation',
        help='Path for local state files',
    )
    parser.add_argument(
        '--aws-region',
        help='AWS region to use',
        default='us-west-2',
    )

    subparsers = parser.add_subparsers()

    sp = subparsers.add_parser(
        'bootstrap-linux-dev',
        help='Bootstrap Linux development environments',
    )
    sp.add_argument(
        '--distros',
        help='Comma delimited list of distros to bootstrap',
    )
    sp.add_argument(
        '--parallel',
        action='store_true',
        help='Generate AMIs in parallel (not CTRL-c safe)',
    )
    sp.set_defaults(func=bootstrap_linux_dev)

    sp = subparsers.add_parser(
        'bootstrap-windows-dev',
        help='Bootstrap the Windows development environment',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=bootstrap_windows_dev)

    sp = subparsers.add_parser(
        'build-all-windows-packages',
        help='Build all Windows packages',
    )
    sp.add_argument(
        '--revision',
        help='Mercurial revision to build',
        default='.',
    )
    sp.add_argument(
        '--version',
        help='Mercurial version string to use',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=build_all_windows_packages)

    sp = subparsers.add_parser(
        'build-inno',
        help='Build Inno Setup installer(s)',
    )
    sp.add_argument(
        '--python-version',
        help='Which version of Python to target',
        choices={2, 3},
        type=int,
        nargs='*',
        default=[3],
    )
    sp.add_argument(
        '--arch',
        help='Architecture to build for',
        choices={'x86', 'x64'},
        nargs='*',
        default=['x64'],
    )
    sp.add_argument(
        '--revision',
        help='Mercurial revision to build',
        default='.',
    )
    sp.add_argument(
        '--version',
        help='Mercurial version string to use in installer',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=build_inno)

    sp = subparsers.add_parser(
        'build-windows-wheel',
        help='Build Windows wheel(s)',
    )
    sp.add_argument(
        '--python-version',
        help='Python version to build for',
-        choices={'2.7', '3.7', '3.8', '3.9'},
+        choices={'2.7', '3.7', '3.8', '3.9', '3.10'},
        nargs='*',
        default=['3.8'],
    )
    sp.add_argument(
        '--arch',
        help='Architecture to build for',
        choices={'x86', 'x64'},
        nargs='*',
        default=['x64'],
    )
    sp.add_argument(
        '--revision',
        help='Mercurial revision to build',
        default='.',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=build_windows_wheel)

    sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
    sp.add_argument(
        '--python-version',
        help='Which version of Python to target',
        choices={2, 3},
        type=int,
        nargs='*',
        default=[3],
    )
    sp.add_argument(
        '--arch',
        help='Architecture to build for',
        choices={'x86', 'x64'},
        nargs='*',
        default=['x64'],
    )
    sp.add_argument(
        '--revision',
        help='Mercurial revision to build',
        default='.',
    )
    sp.add_argument(
        '--version',
        help='Mercurial version string to use in installer',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=build_wix)

    sp = subparsers.add_parser(
        'terminate-ec2-instances',
        help='Terminate all active EC2 instances managed by us',
    )
    sp.set_defaults(func=terminate_ec2_instances)

    sp = subparsers.add_parser(
        'purge-ec2-resources',
        help='Purge all EC2 resources managed by us',
    )
    sp.set_defaults(func=purge_ec2_resources)

    sp = subparsers.add_parser(
        'run-tests-linux',
        help='Run tests on Linux',
    )
    sp.add_argument(
        '--distro',
        help='Linux distribution to run tests on',
        choices=linux.DISTROS,
        default='debian10',
    )
    sp.add_argument(
        '--filesystem',
        help='Filesystem type to use',
        choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
        default='default',
    )
    sp.add_argument(
        '--instance-type',
        help='EC2 instance type to use',
        default='c5.9xlarge',
    )
    sp.add_argument(
        '--python-version',
        help='Python version to use',
        choices={
            'system2',
            'system3',
            '2.7',
            '3.5',
            '3.6',
            '3.7',
            '3.8',
            'pypy',
            'pypy3.5',
            'pypy3.6',
        },
        default='system2',
    )
    sp.add_argument(
        'test_flags',
        help='Extra command line flags to pass to run-tests.py',
        nargs='*',
    )
    sp.set_defaults(func=run_tests_linux)

    sp = subparsers.add_parser(
        'run-tests-windows',
        help='Run tests on Windows',
    )
    sp.add_argument(
        '--instance-type',
        help='EC2 instance type to use',
        default='m6i.large',
    )
    sp.add_argument(
        '--python-version',
        help='Python version to use',
-        choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9'},
+        choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
        default='2.7',
    )
    sp.add_argument(
        '--arch',
        help='Architecture to test',
        choices={'x86', 'x64'},
        default='x64',
    )
    sp.add_argument(
        '--test-flags',
        help='Extra command line flags to pass to run-tests.py',
    )
    sp.add_argument(
        '--base-image-name',
        help='AMI name of base image',
        default=aws.WINDOWS_BASE_IMAGE_NAME,
    )
    sp.set_defaults(func=run_tests_windows)

    sp = subparsers.add_parser(
        'publish-windows-artifacts',
        help='Publish built Windows artifacts (wheels, installers, etc)',
    )
    sp.add_argument(
        '--no-pypi',
        dest='pypi',
        action='store_false',
        default=True,
        help='Skip uploading to PyPI',
    )
    sp.add_argument(
        '--no-mercurial-scm-org',
        dest='mercurial_scm_org',
        action='store_false',
        default=True,
        help='Skip uploading to www.mercurial-scm.org',
    )
    sp.add_argument(
        '--ssh-username',
        help='SSH username for mercurial-scm.org',
    )
    sp.add_argument(
        'version',
        help='Mercurial version string to locate local packages',
    )
    sp.set_defaults(func=publish_windows_artifacts)

    sp = subparsers.add_parser(
        'try', help='Run CI automation against a custom changeset'
    )
    sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
    sp.set_defaults(func=run_try)

    return parser


def main():
    parser = get_parser()
    args = parser.parse_args()

    local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
    automation = HGAutomation(local_state_path)

    if not hasattr(args, 'func'):
        parser.print_help()
        return

    kwargs = dict(vars(args))
    del kwargs['func']
    del kwargs['state_path']

    args.func(automation, **kwargs)
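get_parser() and main() together implement the standard argparse sub-command dispatch idiom: each sub-parser registers its handler via set_defaults(func=...), and main() strips the bookkeeping entries ('func' and 'state_path') out of vars(args) before calling the handler with the rest as keyword arguments. A minimal self-contained sketch of the same idiom; `greet` is a hypothetical handler, not part of this file:

# --- illustrative sketch, not part of cli.py ---
import argparse

def greet(name):
    print('hello %s' % name)

parser = argparse.ArgumentParser()
sub = parser.add_subparsers()
sp = sub.add_parser('greet')
sp.add_argument('name')
sp.set_defaults(func=greet)

args = parser.parse_args(['greet', 'world'])
kwargs = dict(vars(args))
kwargs.pop('func')       # remove dispatch bookkeeping before the call
args.func(**kwargs)      # -> hello world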
@@ -1,674 +1,680 @@
# windows.py - Automation specific to Windows
#
# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# no-check-code because Python 3 native.

import datetime
import os
import paramiko
import pathlib
import re
import subprocess
import tempfile

from .pypi import upload as pypi_upload
from .winrm import run_powershell


# PowerShell commands to activate a Visual Studio 2008 environment.
# This is essentially a port of vcvarsall.bat to PowerShell.
ACTIVATE_VC9_AMD64 = r'''
Write-Output "activating Visual Studio 2008 environment for AMD64"
$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
$Env:VCINSTALLDIR = "${root}\VC\"
$Env:WindowsSdkDir = "${root}\WinSDK\"
$Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
$Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
$Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
'''.lstrip()

ACTIVATE_VC9_X86 = r'''
Write-Output "activating Visual Studio 2008 environment for x86"
$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
$Env:VCINSTALLDIR = "${root}\VC\"
$Env:WindowsSdkDir = "${root}\WinSDK\"
$Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
$Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
'''.lstrip()

HG_PURGE = r'''
$Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
Set-Location C:\hgdev\src
hg.exe --config extensions.purge= purge --all
if ($LASTEXITCODE -ne 0) {
    throw "process exited non-0: $LASTEXITCODE"
}
Write-Output "purged Mercurial repo"
'''

HG_UPDATE_CLEAN = r'''
$Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
Set-Location C:\hgdev\src
hg.exe --config extensions.purge= purge --all
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
hg.exe update -C {revision}
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
hg.exe log -r .
Write-Output "updated Mercurial working directory to {revision}"
'''.lstrip()
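Note the asymmetry between the two templates above: HG_PURGE is sent to the remote verbatim, so its PowerShell braces are single, while HG_UPDATE_CLEAN is later passed through str.format() (see synchronize_hg() below), so every literal brace must be doubled and only {revision} is a placeholder. A tiny self-contained illustration of that escaping rule:

# --- illustrative sketch, not part of windows.py ---
# str.format() treats {{ and }} as literal braces; {revision} is filled in.
template = 'if ($LASTEXITCODE -ne 0) {{ throw "fail" }}\nhg.exe update -C {revision}'
print(template.format(revision='abc123'))
# if ($LASTEXITCODE -ne 0) { throw "fail" }
# hg.exe update -C abc123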

BUILD_INNO_PYTHON3 = r'''
$Env:RUSTUP_HOME = "C:\hgdev\rustup"
$Env:CARGO_HOME = "C:\hgdev\cargo"
Set-Location C:\hgdev\src
C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --pyoxidizer-target {pyoxidizer_target} --version {version}
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''

BUILD_INNO_PYTHON2 = r'''
Set-Location C:\hgdev\src
$python = "C:\hgdev\python27-{arch}\python.exe"
C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python {extra_args}
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''.lstrip()

BUILD_WHEEL = r'''
Set-Location C:\hgdev\src
C:\hgdev\python{python_version}-{arch}\python.exe -m pip wheel --wheel-dir dist .
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''

BUILD_WIX_PYTHON3 = r'''
$Env:RUSTUP_HOME = "C:\hgdev\rustup"
$Env:CARGO_HOME = "C:\hgdev\cargo"
Set-Location C:\hgdev\src
C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --pyoxidizer-target {pyoxidizer_target} --version {version}
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''

BUILD_WIX_PYTHON2 = r'''
Set-Location C:\hgdev\src
$python = "C:\hgdev\python27-{arch}\python.exe"
C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''

RUN_TESTS = r'''
C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
if ($LASTEXITCODE -ne 0) {{
    throw "process exited non-0: $LASTEXITCODE"
}}
'''

WHEEL_FILENAME_PYTHON27_X86 = 'mercurial-{version}-cp27-cp27m-win32.whl'
WHEEL_FILENAME_PYTHON27_X64 = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
WHEEL_FILENAME_PYTHON37_X86 = 'mercurial-{version}-cp37-cp37m-win32.whl'
WHEEL_FILENAME_PYTHON37_X64 = 'mercurial-{version}-cp37-cp37m-win_amd64.whl'
WHEEL_FILENAME_PYTHON38_X86 = 'mercurial-{version}-cp38-cp38-win32.whl'
WHEEL_FILENAME_PYTHON38_X64 = 'mercurial-{version}-cp38-cp38-win_amd64.whl'
WHEEL_FILENAME_PYTHON39_X86 = 'mercurial-{version}-cp39-cp39-win32.whl'
WHEEL_FILENAME_PYTHON39_X64 = 'mercurial-{version}-cp39-cp39-win_amd64.whl'
+WHEEL_FILENAME_PYTHON310_X86 = 'mercurial-{version}-cp310-cp310-win32.whl'
+WHEEL_FILENAME_PYTHON310_X64 = 'mercurial-{version}-cp310-cp310-win_amd64.whl'

EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
EXE_FILENAME_PYTHON3_X86 = 'Mercurial-{version}-x86.exe'
EXE_FILENAME_PYTHON3_X64 = 'Mercurial-{version}-x64.exe'

MSI_FILENAME_PYTHON2_X86 = 'mercurial-{version}-x86-python2.msi'
MSI_FILENAME_PYTHON2_X64 = 'mercurial-{version}-x64-python2.msi'
MSI_FILENAME_PYTHON3_X86 = 'mercurial-{version}-x86.msi'
MSI_FILENAME_PYTHON3_X64 = 'mercurial-{version}-x64.msi'

MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'

X86_USER_AGENT_PATTERN = '.*Windows.*'
X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'

EXE_PYTHON2_X86_DESCRIPTION = (
    'Mercurial {version} Inno Setup installer - x86 Windows (Python 2) '
    '- does not require admin rights'
)
EXE_PYTHON2_X64_DESCRIPTION = (
    'Mercurial {version} Inno Setup installer - x64 Windows (Python 2) '
    '- does not require admin rights'
)
# TODO remove Python version once Python 2 is dropped.
EXE_PYTHON3_X86_DESCRIPTION = (
    'Mercurial {version} Inno Setup installer - x86 Windows (Python 3) '
    '- does not require admin rights'
)
EXE_PYTHON3_X64_DESCRIPTION = (
    'Mercurial {version} Inno Setup installer - x64 Windows (Python 3) '
    '- does not require admin rights'
)
MSI_PYTHON2_X86_DESCRIPTION = (
    'Mercurial {version} MSI installer - x86 Windows (Python 2) '
    '- requires admin rights'
)
MSI_PYTHON2_X64_DESCRIPTION = (
    'Mercurial {version} MSI installer - x64 Windows (Python 2) '
    '- requires admin rights'
)
MSI_PYTHON3_X86_DESCRIPTION = (
    'Mercurial {version} MSI installer - x86 Windows (Python 3) '
    '- requires admin rights'
)
MSI_PYTHON3_X64_DESCRIPTION = (
    'Mercurial {version} MSI installer - x64 Windows (Python 3) '
    '- requires admin rights'
)


def get_vc_prefix(arch):
    if arch == 'x86':
        return ACTIVATE_VC9_X86
    elif arch == 'x64':
        return ACTIVATE_VC9_AMD64
    else:
        raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
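get_vc_prefix() exists so the Python 2 build paths can prepend the matching Visual Studio 2008 activation script to a build template before shipping it to the remote machine; build_wheel() and the Python 2 installer builders below do exactly this. A sketch of the composition, illustration only, where winrm_client would come from a temporary EC2 instance as elsewhere in this file:

# --- illustrative sketch, not part of windows.py ---
# Mirrors what build_wheel() does for Python 2.7 on x64.
ps = get_vc_prefix('x64') + BUILD_WHEEL.format(python_version='27', arch='x64')
# run_powershell(winrm_client, ps)  # winrm_client comes from the EC2 instance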

def fix_authorized_keys_permissions(winrm_client, path):
    commands = [
        '$ErrorActionPreference = "Stop"',
        'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
        r'icacls %s /remove:g "NT Service\sshd"' % path,
    ]

    run_powershell(winrm_client, '\n'.join(commands))


def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
    """Synchronize local Mercurial repo to remote EC2 instance."""

    winrm_client = ec2_instance.winrm_client

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        # Generate SSH key to use for communication.
        subprocess.run(
            [
                'ssh-keygen',
                '-t',
                'rsa',
                '-b',
                '4096',
                '-N',
                '',
                '-f',
                str(ssh_dir / 'id_rsa'),
            ],
            check=True,
            capture_output=True,
        )

        # Add it to ~/.ssh/authorized_keys on remote.
        # This assumes the file doesn't already exist.
        authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
        winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
        winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
        fix_authorized_keys_permissions(winrm_client, authorized_keys)

        public_ip = ec2_instance.public_ip_address

        ssh_config = temp_dir / '.ssh' / 'config'

        with open(ssh_config, 'w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write('  User Administrator\n')
            fh.write('  StrictHostKeyChecking no\n')
            fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write('  IdentityFile %s\n' % (ssh_dir / 'id_rsa'))

        if not (hg_repo / '.hg').is_dir():
            raise Exception(
                '%s is not a Mercurial repository; '
                'synchronization not yet supported' % hg_repo
            )

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = hg_repo / 'hg'

        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
            cwd=str(hg_repo),
            env=env,
            check=True,
            capture_output=True,
        )

        full_revision = res.stdout.decode('ascii')

        args = [
            'python2.7',
            hg_bin,
            '--config',
            'ui.ssh=ssh -F %s' % ssh_config,
            '--config',
            'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
            # Also ensure .hgtags changes are present so auto version
            # calculation works.
            'push',
            '-f',
            '-r',
            full_revision,
            '-r',
            'file(.hgtags)',
            'ssh://%s/c:/hgdev/src' % public_ip,
        ]

        res = subprocess.run(args, cwd=str(hg_repo), env=env)

        # Allow 1 (no-op) to not trigger error.
        if res.returncode not in (0, 1):
            res.check_returncode()

        run_powershell(
            winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
        )

        # TODO detect dirty local working directory and synchronize accordingly.


def purge_hg(winrm_client):
    """Purge the Mercurial source repository on an EC2 instance."""
    run_powershell(winrm_client, HG_PURGE)


def find_latest_dist(winrm_client, pattern):
    """Find path to newest file in dist/ directory matching a pattern."""

    res = winrm_client.execute_ps(
        r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
        '| Sort-Object LastWriteTime -Descending '
        '| Select-Object -First 1\n'
        '$v.name' % pattern
    )
    return res[0]


def copy_latest_dist(winrm_client, pattern, dest_path):
    """Copy latest file matching pattern in dist/ directory.

    Given a WinRM client and a file pattern, find the latest file on the remote
    matching that pattern and copy it to the ``dest_path`` directory on the
    local machine.
    """
    latest = find_latest_dist(winrm_client, pattern)
    source = r'C:\hgdev\src\dist\%s' % latest
    dest = dest_path / latest
    print('copying %s to %s' % (source, dest))
    winrm_client.fetch(source, str(dest))
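find_latest_dist() and copy_latest_dist() are the retrieval half of every build function in this file: the remote build drops its artifacts into C:\hgdev\src\dist, and the newest match is then fetched to a local directory. A usage sketch, illustration only, with winrm_client again coming from a temporary EC2 instance:

# --- illustrative sketch, not part of windows.py ---
import pathlib

dest = pathlib.Path('dist')
dest.mkdir(exist_ok=True)
# copy_latest_dist(winrm_client, '*.whl', dest)  # fetch the newest wheel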
331
333
332
334
333 def build_inno_installer(
335 def build_inno_installer(
334 winrm_client,
336 winrm_client,
335 python_version: int,
337 python_version: int,
336 arch: str,
338 arch: str,
337 dest_path: pathlib.Path,
339 dest_path: pathlib.Path,
338 version=None,
340 version=None,
339 ):
341 ):
340 """Build the Inno Setup installer on a remote machine.
342 """Build the Inno Setup installer on a remote machine.
341
343
342 Using a WinRM client, remote commands are executed to build
344 Using a WinRM client, remote commands are executed to build
343 a Mercurial Inno Setup installer.
345 a Mercurial Inno Setup installer.
344 """
346 """
345 print(
347 print(
346 'building Inno Setup installer for Python %d %s'
348 'building Inno Setup installer for Python %d %s'
347 % (python_version, arch)
349 % (python_version, arch)
348 )
350 )
349
351
350 if python_version == 3:
352 if python_version == 3:
351 # TODO fix this limitation in packaging code
353 # TODO fix this limitation in packaging code
352 if not version:
354 if not version:
353 raise Exception(
355 raise Exception(
354 "version string is required when building for Python 3"
356 "version string is required when building for Python 3"
355 )
357 )
356
358
357 if arch == "x86":
359 if arch == "x86":
358 target_triple = "i686-pc-windows-msvc"
360 target_triple = "i686-pc-windows-msvc"
359 elif arch == "x64":
361 elif arch == "x64":
360 target_triple = "x86_64-pc-windows-msvc"
362 target_triple = "x86_64-pc-windows-msvc"
361 else:
363 else:
362 raise Exception("unhandled arch: %s" % arch)
364 raise Exception("unhandled arch: %s" % arch)
363
365
364 ps = BUILD_INNO_PYTHON3.format(
366 ps = BUILD_INNO_PYTHON3.format(
365 pyoxidizer_target=target_triple,
367 pyoxidizer_target=target_triple,
366 version=version,
368 version=version,
367 )
369 )
368 else:
370 else:
369 extra_args = []
371 extra_args = []
370 if version:
372 if version:
371 extra_args.extend(['--version', version])
373 extra_args.extend(['--version', version])
372
374
373 ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
375 ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
374 arch=arch, extra_args=' '.join(extra_args)
376 arch=arch, extra_args=' '.join(extra_args)
375 )
377 )
376
378
377 run_powershell(winrm_client, ps)
379 run_powershell(winrm_client, ps)
378 copy_latest_dist(winrm_client, '*.exe', dest_path)
380 copy_latest_dist(winrm_client, '*.exe', dest_path)
379
381
380
382
381 def build_wheel(
383 def build_wheel(
382 winrm_client, python_version: str, arch: str, dest_path: pathlib.Path
384 winrm_client, python_version: str, arch: str, dest_path: pathlib.Path
383 ):
385 ):
384 """Build Python wheels on a remote machine.
386 """Build Python wheels on a remote machine.
385
387
386 Using a WinRM client, remote commands are executed to build a Python wheel
388 Using a WinRM client, remote commands are executed to build a Python wheel
387 for Mercurial.
389 for Mercurial.
388 """
390 """
389 print('Building Windows wheel for Python %s %s' % (python_version, arch))
391 print('Building Windows wheel for Python %s %s' % (python_version, arch))
390
392
391 ps = BUILD_WHEEL.format(
393 ps = BUILD_WHEEL.format(
392 python_version=python_version.replace(".", ""), arch=arch
394 python_version=python_version.replace(".", ""), arch=arch
393 )
395 )
394
396
395 # Python 2.7 requires an activated environment.
397 # Python 2.7 requires an activated environment.
396 if python_version == "2.7":
398 if python_version == "2.7":
397 ps = get_vc_prefix(arch) + ps
399 ps = get_vc_prefix(arch) + ps
398
400
399 run_powershell(winrm_client, ps)
401 run_powershell(winrm_client, ps)
400 copy_latest_dist(winrm_client, '*.whl', dest_path)
402 copy_latest_dist(winrm_client, '*.whl', dest_path)
401
403
402
404
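Wheels are driven the same way. A sketch under the same assumptions (note that, unlike the installer builders, this function takes the Python version as an ``X.Y`` string rather than a major-version int):

    import pathlib

    # One wheel per (interpreter, architecture) pair.
    for py in ('2.7', '3.7', '3.8', '3.9'):
        for arch in ('x86', 'x64'):
            build_wheel(client, py, arch, pathlib.Path('dist'))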
403 def build_wix_installer(
405 def build_wix_installer(
404 winrm_client,
406 winrm_client,
405 python_version: int,
407 python_version: int,
406 arch: str,
408 arch: str,
407 dest_path: pathlib.Path,
409 dest_path: pathlib.Path,
408 version=None,
410 version=None,
409 ):
411 ):
410 """Build the WiX installer on a remote machine.
412 """Build the WiX installer on a remote machine.
411
413
412 Using a WinRM client, remote commands are executed to build a WiX installer.
414 Using a WinRM client, remote commands are executed to build a WiX installer.
413 """
415 """
414 print('Building WiX installer for Python %d %s' % (python_version, arch))
416 print('Building WiX installer for Python %d %s' % (python_version, arch))
415
417
416 if python_version == 3:
418 if python_version == 3:
417 # TODO fix this limitation in packaging code
419 # TODO fix this limitation in packaging code
418 if not version:
420 if not version:
419 raise Exception(
421 raise Exception(
420 "version string is required when building for Python 3"
422 "version string is required when building for Python 3"
421 )
423 )
422
424
423 if arch == "x86":
425 if arch == "x86":
424 target_triple = "i686-pc-windows-msvc"
426 target_triple = "i686-pc-windows-msvc"
425 elif arch == "x64":
427 elif arch == "x64":
426 target_triple = "x86_64-pc-windows-msvc"
428 target_triple = "x86_64-pc-windows-msvc"
427 else:
429 else:
428 raise Exception("unhandled arch: %s" % arch)
430 raise Exception("unhandled arch: %s" % arch)
429
431
430 ps = BUILD_WIX_PYTHON3.format(
432 ps = BUILD_WIX_PYTHON3.format(
431 pyoxidizer_target=target_triple,
433 pyoxidizer_target=target_triple,
432 version=version,
434 version=version,
433 )
435 )
434 else:
436 else:
435 extra_args = []
437 extra_args = []
436 if version:
438 if version:
437 extra_args.extend(['--version', version])
439 extra_args.extend(['--version', version])
438
440
439 ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
441 ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
440 arch=arch, extra_args=' '.join(extra_args)
442 arch=arch, extra_args=' '.join(extra_args)
441 )
443 )
442
444
443 run_powershell(winrm_client, ps)
445 run_powershell(winrm_client, ps)
444 copy_latest_dist(winrm_client, '*.msi', dest_path)
446 copy_latest_dist(winrm_client, '*.msi', dest_path)
445
447
446
448
447 def run_tests(winrm_client, python_version, arch, test_flags=''):
449 def run_tests(winrm_client, python_version, arch, test_flags=''):
448 """Run tests on a remote Windows machine.
450 """Run tests on a remote Windows machine.
449
451
450 ``python_version`` is an ``X.Y`` string like ``2.7`` or ``3.7``.
452 ``python_version`` is an ``X.Y`` string like ``2.7`` or ``3.7``.
451 ``arch`` is ``x86`` or ``x64``.
453 ``arch`` is ``x86`` or ``x64``.
452 ``test_flags`` is a str representing extra arguments to pass to
454 ``test_flags`` is a str representing extra arguments to pass to
453 ``run-tests.py``.
455 ``run-tests.py``.
454 """
456 """
455 if not re.match(r'\d\.\d', python_version):
457 if not re.match(r'\d\.\d', python_version):
456 raise ValueError(
458 raise ValueError(
457 r'python_version must be \d.\d; got %s' % python_version
459 r'python_version must be \d.\d; got %s' % python_version
458 )
460 )
459
461
460 if arch not in ('x86', 'x64'):
462 if arch not in ('x86', 'x64'):
461 raise ValueError('arch must be x86 or x64; got %s' % arch)
463 raise ValueError('arch must be x86 or x64; got %s' % arch)
462
464
463 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
465 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
464
466
465 ps = RUN_TESTS.format(
467 ps = RUN_TESTS.format(
466 python_path=python_path,
468 python_path=python_path,
467 test_flags=test_flags or '',
469 test_flags=test_flags or '',
468 )
470 )
469
471
470 run_powershell(winrm_client, ps)
472 run_powershell(winrm_client, ps)
471
473
472
474
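A typical invocation runs the suite against a single interpreter; a minimal sketch (again with a hypothetical connected `client`):

    # Extra flags are passed through verbatim to run-tests.py.
    run_tests(client, '3.9', 'x64', test_flags='--blacklist /tmp/check-tests.txt')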
473 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
475 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
474 return (
476 return (
475 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
477 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
476 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
478 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
477 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
479 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
478 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
480 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
479 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
481 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
480 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
482 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
481 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
483 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
482 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
484 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
485 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
486 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
483 )
487 )
484
488
485
489
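Each ``WHEEL_FILENAME_*`` constant is a format template keyed on the release version. The expansions follow the standard wheel naming scheme; for example, the Python 3.9 x64 template should expand to something like the wheel name pinned later in install-dependencies.ps1:

    >>> WHEEL_FILENAME_PYTHON39_X64.format(version='5.8.1')
    'mercurial-5.8.1-cp39-cp39-win_amd64.whl'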
486 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
490 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
487 return (
491 return (
488 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
492 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
489 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
493 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
490 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
494 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
491 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
495 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
492 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
496 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
493 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
497 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
494 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
498 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
495 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
499 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
500 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
501 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
496 dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
502 dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
497 dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
503 dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
498 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
504 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
499 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
505 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
500 dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
506 dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
501 dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
507 dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
502 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
508 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
503 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
509 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
504 )
510 )
505
511
506
512
507 def generate_latest_dat(version: str):
513 def generate_latest_dat(version: str):
508 python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
514 python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
509 python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
515 python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
510 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
516 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
511 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
517 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
512 python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
518 python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
513 python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
519 python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
514 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
520 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
515 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
521 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
516
522
517 entries = (
523 entries = (
518 (
524 (
519 '10',
525 '10',
520 version,
526 version,
521 X86_USER_AGENT_PATTERN,
527 X86_USER_AGENT_PATTERN,
522 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_exe_filename),
528 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_exe_filename),
523 EXE_PYTHON3_X86_DESCRIPTION.format(version=version),
529 EXE_PYTHON3_X86_DESCRIPTION.format(version=version),
524 ),
530 ),
525 (
531 (
526 '10',
532 '10',
527 version,
533 version,
528 X64_USER_AGENT_PATTERN,
534 X64_USER_AGENT_PATTERN,
529 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_exe_filename),
535 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_exe_filename),
530 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
536 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
531 ),
537 ),
532 (
538 (
533 '9',
539 '9',
534 version,
540 version,
535 X86_USER_AGENT_PATTERN,
541 X86_USER_AGENT_PATTERN,
536 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
542 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
537 EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
543 EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
538 ),
544 ),
539 (
545 (
540 '9',
546 '9',
541 version,
547 version,
542 X64_USER_AGENT_PATTERN,
548 X64_USER_AGENT_PATTERN,
543 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
549 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
544 EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
550 EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
545 ),
551 ),
546 (
552 (
547 '10',
553 '10',
548 version,
554 version,
549 X86_USER_AGENT_PATTERN,
555 X86_USER_AGENT_PATTERN,
550 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_msi_filename),
556 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_msi_filename),
551 MSI_PYTHON3_X86_DESCRIPTION.format(version=version),
557 MSI_PYTHON3_X86_DESCRIPTION.format(version=version),
552 ),
558 ),
553 (
559 (
554 '10',
560 '10',
555 version,
561 version,
556 X64_USER_AGENT_PATTERN,
562 X64_USER_AGENT_PATTERN,
557 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
563 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
558 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
564 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
559 ),
565 ),
560 (
566 (
561 '9',
567 '9',
562 version,
568 version,
563 X86_USER_AGENT_PATTERN,
569 X86_USER_AGENT_PATTERN,
564 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
570 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
565 MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
571 MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
566 ),
572 ),
567 (
573 (
568 '9',
574 '9',
569 version,
575 version,
570 X64_USER_AGENT_PATTERN,
576 X64_USER_AGENT_PATTERN,
571 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
577 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
572 MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
578 MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
573 ),
579 ),
574 )
580 )
575
581
576 lines = ['\t'.join(e) for e in entries]
582 lines = ['\t'.join(e) for e in entries]
577
583
578 return '\n'.join(lines) + '\n'
584 return '\n'.join(lines) + '\n'
579
585
580
586
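Each entry becomes one tab-separated row of latest.dat with five columns: what appears to be the minimum Windows version ('9' for the Python 2 builds, '10' for Python 3), the Mercurial version, a user-agent match pattern, the artifact URL, and a human-readable description. A consumer-side sketch:

    # Five tab-separated fields per line, one line per artifact.
    for line in generate_latest_dat('5.9.1').splitlines():
        min_windows, version, ua_pattern, url, description = line.split('\t')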
581 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
587 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
582 """Publish Windows release artifacts to PyPI."""
588 """Publish Windows release artifacts to PyPI."""
583
589
584 wheel_paths = resolve_wheel_artifacts(dist_path, version)
590 wheel_paths = resolve_wheel_artifacts(dist_path, version)
585
591
586 for p in wheel_paths:
592 for p in wheel_paths:
587 if not p.exists():
593 if not p.exists():
588 raise Exception('%s not found' % p)
594 raise Exception('%s not found' % p)
589
595
590 print('uploading wheels to PyPI (you may be prompted for credentials)')
596 print('uploading wheels to PyPI (you may be prompted for credentials)')
591 pypi_upload(wheel_paths)
597 pypi_upload(wheel_paths)
592
598
593
599
594 def publish_artifacts_mercurial_scm_org(
600 def publish_artifacts_mercurial_scm_org(
595 dist_path: pathlib.Path, version: str, ssh_username=None
601 dist_path: pathlib.Path, version: str, ssh_username=None
596 ):
602 ):
597 """Publish Windows release artifacts to mercurial-scm.org."""
603 """Publish Windows release artifacts to mercurial-scm.org."""
598 all_paths = resolve_all_artifacts(dist_path, version)
604 all_paths = resolve_all_artifacts(dist_path, version)
599
605
600 for p in all_paths:
606 for p in all_paths:
601 if not p.exists():
607 if not p.exists():
602 raise Exception('%s not found' % p)
608 raise Exception('%s not found' % p)
603
609
604 client = paramiko.SSHClient()
610 client = paramiko.SSHClient()
605 client.load_system_host_keys()
611 client.load_system_host_keys()
606 # We assume the system SSH configuration knows how to connect.
612 # We assume the system SSH configuration knows how to connect.
607 print('connecting to mercurial-scm.org via ssh...')
613 print('connecting to mercurial-scm.org via ssh...')
608 try:
614 try:
609 client.connect('mercurial-scm.org', username=ssh_username)
615 client.connect('mercurial-scm.org', username=ssh_username)
610 except paramiko.AuthenticationException:
616 except paramiko.AuthenticationException:
611 print('error authenticating; is an SSH key available in an SSH agent?')
617 print('error authenticating; is an SSH key available in an SSH agent?')
612 raise
618 raise
613
619
614 print('SSH connection established')
620 print('SSH connection established')
615
621
616 print('opening SFTP client...')
622 print('opening SFTP client...')
617 sftp = client.open_sftp()
623 sftp = client.open_sftp()
618 print('SFTP client obtained')
624 print('SFTP client obtained')
619
625
620 for p in all_paths:
626 for p in all_paths:
621 dest_path = '/var/www/release/windows/%s' % p.name
627 dest_path = '/var/www/release/windows/%s' % p.name
622 print('uploading %s to %s' % (p, dest_path))
628 print('uploading %s to %s' % (p, dest_path))
623
629
624 with p.open('rb') as fh:
630 with p.open('rb') as fh:
625 data = fh.read()
631 data = fh.read()
626
632
627 with sftp.open(dest_path, 'wb') as fh:
633 with sftp.open(dest_path, 'wb') as fh:
628 fh.write(data)
634 fh.write(data)
629 fh.chmod(0o0664)
635 fh.chmod(0o0664)
630
636
631 latest_dat_path = '/var/www/release/windows/latest.dat'
637 latest_dat_path = '/var/www/release/windows/latest.dat'
632
638
633 now = datetime.datetime.utcnow()
639 now = datetime.datetime.utcnow()
634 backup_path = dist_path / (
640 backup_path = dist_path / (
635 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
641 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
636 )
642 )
637 print('backing up %s to %s' % (latest_dat_path, backup_path))
643 print('backing up %s to %s' % (latest_dat_path, backup_path))
638
644
639 with sftp.open(latest_dat_path, 'rb') as fh:
645 with sftp.open(latest_dat_path, 'rb') as fh:
640 latest_dat_old = fh.read()
646 latest_dat_old = fh.read()
641
647
642 with backup_path.open('wb') as fh:
648 with backup_path.open('wb') as fh:
643 fh.write(latest_dat_old)
649 fh.write(latest_dat_old)
644
650
645 print('writing %s with content:' % latest_dat_path)
651 print('writing %s with content:' % latest_dat_path)
646 latest_dat_content = generate_latest_dat(version)
652 latest_dat_content = generate_latest_dat(version)
647 print(latest_dat_content)
653 print(latest_dat_content)
648
654
649 with sftp.open(latest_dat_path, 'wb') as fh:
655 with sftp.open(latest_dat_path, 'wb') as fh:
650 fh.write(latest_dat_content.encode('ascii'))
656 fh.write(latest_dat_content.encode('ascii'))
651
657
652
658
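Note that each artifact above is read fully into memory before being written to the SFTP handle; for installer-sized files that is fine, but paramiko can also stream straight from disk if the artifacts ever grow. A sketch using the standard SFTPClient API:

    # Streaming upload; the explicit chmod is still needed because put()
    # keeps whatever mode the server assigns by default.
    sftp.put(str(p), dest_path)
    sftp.chmod(dest_path, 0o664)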
653 def publish_artifacts(
659 def publish_artifacts(
654 dist_path: pathlib.Path,
660 dist_path: pathlib.Path,
655 version: str,
661 version: str,
656 pypi=True,
662 pypi=True,
657 mercurial_scm_org=True,
663 mercurial_scm_org=True,
658 ssh_username=None,
664 ssh_username=None,
659 ):
665 ):
660 """Publish Windows release artifacts.
666 """Publish Windows release artifacts.
661
667
662 Files are found in `dist_path`. We will look for files with version string
668 Files are found in `dist_path`. We will look for files with version string
663 `version`.
669 `version`.
664
670
665 `pypi` controls whether we upload to PyPI.
671 `pypi` controls whether we upload to PyPI.
666 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
672 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
667 """
673 """
668 if pypi:
674 if pypi:
669 publish_artifacts_pypi(dist_path, version)
675 publish_artifacts_pypi(dist_path, version)
670
676
671 if mercurial_scm_org:
677 if mercurial_scm_org:
672 publish_artifacts_mercurial_scm_org(
678 publish_artifacts_mercurial_scm_org(
673 dist_path, version, ssh_username=ssh_username
679 dist_path, version, ssh_username=ssh_username
674 )
680 )
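Putting it together, publishing a release from a local dist directory is a single call; a minimal sketch (the username is a hypothetical account):

    import pathlib

    publish_artifacts(
        pathlib.Path('dist'),
        '5.9.1',
        pypi=True,
        mercurial_scm_org=True,
        ssh_username='releaser',
    )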
@@ -1,188 +1,193 @@
1 stages:
1 stages:
2 - tests
2 - tests
3 - phabricator
3 - phabricator
4
4
5 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
5 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
6
6
7 variables:
7 variables:
8 PYTHON: python
8 PYTHON: python
9 TEST_HGMODULEPOLICY: "allow"
9 TEST_HGMODULEPOLICY: "allow"
10 HG_CI_IMAGE_TAG: "latest"
10 HG_CI_IMAGE_TAG: "latest"
11 TEST_HGTESTS_ALLOW_NETIO: "0"
11 TEST_HGTESTS_ALLOW_NETIO: "0"
12
12
13 .all_template: &all
13 .all_template: &all
14 when: always
14 when: always
15
15
16 .runtests_template: &runtests
16 .runtests_template: &runtests
17 <<: *all
17 <<: *all
18 stage: tests
18 stage: tests
19 # The runner made a clone as root.
19 # The runner made a clone as root.
20 # We make a new clone owned by the user running the step.
20 # We make a new clone owned by the user running the step.
21 before_script:
21 before_script:
22 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
22 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
23 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
23 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
24 - cd /tmp/mercurial-ci/
24 - cd /tmp/mercurial-ci/
25 - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
25 - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
26 - black --version
26 - black --version
27 - clang-format --version
27 - clang-format --version
28 script:
28 script:
29 - echo "python used, $PYTHON"
29 - echo "python used, $PYTHON"
30 - echo "$RUNTEST_ARGS"
30 - echo "$RUNTEST_ARGS"
31 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
31 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
32
32
33 checks-py2:
33 checks-py2:
34 <<: *runtests
34 <<: *runtests
35 variables:
35 variables:
36 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
36 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
37
37
38 checks-py3:
38 checks-py3:
39 <<: *runtests
39 <<: *runtests
40 variables:
40 variables:
41 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
41 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
42 PYTHON: python3
42 PYTHON: python3
43
43
44 rust-cargo-test-py2: &rust_cargo_test
44 rust-cargo-test-py2: &rust_cargo_test
45 <<: *all
45 <<: *all
46 stage: tests
46 stage: tests
47 script:
47 script:
48 - echo "python used, $PYTHON"
48 - echo "python used, $PYTHON"
49 - make rust-tests
49 - make rust-tests
50
50
51 rust-cargo-test-py3:
51 rust-cargo-test-py3:
52 stage: tests
52 stage: tests
53 <<: *rust_cargo_test
53 <<: *rust_cargo_test
54 variables:
54 variables:
55 PYTHON: python3
55 PYTHON: python3
56
56
57 phabricator-refresh:
57 phabricator-refresh:
58 stage: phabricator
58 stage: phabricator
59 rules:
60 - if: '"$PHABRICATOR_TOKEN" != "NO-PHAB"'
61 when: on_success
62 - if: '"$PHABRICATOR_TOKEN" == "NO-PHAB"'
63 when: never
59 variables:
64 variables:
60 DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
65 DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
61 STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
66 STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
62 script:
67 script:
63 - |
68 - |
64 if [ `hg branch` == "stable" ]; then
69 if [ `hg branch` == "stable" ]; then
65 ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
70 ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
66 else
71 else
67 ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
72 ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
68 fi
73 fi
69
74
70 test-py2:
75 test-py2:
71 <<: *runtests
76 <<: *runtests
72 variables:
77 variables:
73 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
78 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
74 TEST_HGMODULEPOLICY: "c"
79 TEST_HGMODULEPOLICY: "c"
75 TEST_HGTESTS_ALLOW_NETIO: "1"
80 TEST_HGTESTS_ALLOW_NETIO: "1"
76
81
77 test-py3:
82 test-py3:
78 <<: *runtests
83 <<: *runtests
79 variables:
84 variables:
80 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
85 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
81 PYTHON: python3
86 PYTHON: python3
82 TEST_HGMODULEPOLICY: "c"
87 TEST_HGMODULEPOLICY: "c"
83 TEST_HGTESTS_ALLOW_NETIO: "1"
88 TEST_HGTESTS_ALLOW_NETIO: "1"
84
89
85 test-py2-pure:
90 test-py2-pure:
86 <<: *runtests
91 <<: *runtests
87 variables:
92 variables:
88 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
93 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
89 TEST_HGMODULEPOLICY: "py"
94 TEST_HGMODULEPOLICY: "py"
90
95
91 test-py3-pure:
96 test-py3-pure:
92 <<: *runtests
97 <<: *runtests
93 variables:
98 variables:
94 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
99 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
95 PYTHON: python3
100 PYTHON: python3
96 TEST_HGMODULEPOLICY: "py"
101 TEST_HGMODULEPOLICY: "py"
97
102
98 test-py2-rust:
103 test-py2-rust:
99 <<: *runtests
104 <<: *runtests
100 variables:
105 variables:
101 HGWITHRUSTEXT: cpython
106 HGWITHRUSTEXT: cpython
102 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
107 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
103 TEST_HGMODULEPOLICY: "rust+c"
108 TEST_HGMODULEPOLICY: "rust+c"
104
109
105 test-py3-rust:
110 test-py3-rust:
106 <<: *runtests
111 <<: *runtests
107 variables:
112 variables:
108 HGWITHRUSTEXT: cpython
113 HGWITHRUSTEXT: cpython
109 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
114 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
110 PYTHON: python3
115 PYTHON: python3
111 TEST_HGMODULEPOLICY: "rust+c"
116 TEST_HGMODULEPOLICY: "rust+c"
112
117
113 test-py3-rhg:
118 test-py3-rhg:
114 <<: *runtests
119 <<: *runtests
115 variables:
120 variables:
116 HGWITHRUSTEXT: cpython
121 HGWITHRUSTEXT: cpython
117 RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
122 RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
118 PYTHON: python3
123 PYTHON: python3
119 TEST_HGMODULEPOLICY: "rust+c"
124 TEST_HGMODULEPOLICY: "rust+c"
120
125
121 test-py2-chg:
126 test-py2-chg:
122 <<: *runtests
127 <<: *runtests
123 variables:
128 variables:
124 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
129 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
125 TEST_HGMODULEPOLICY: "c"
130 TEST_HGMODULEPOLICY: "c"
126
131
127 test-py3-chg:
132 test-py3-chg:
128 <<: *runtests
133 <<: *runtests
129 variables:
134 variables:
130 PYTHON: python3
135 PYTHON: python3
131 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
136 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
132 TEST_HGMODULEPOLICY: "c"
137 TEST_HGMODULEPOLICY: "c"
133
138
134 check-pytype-py3:
139 check-pytype-py3:
135 extends: .runtests_template
140 extends: .runtests_template
136 before_script:
141 before_script:
137 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
142 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
138 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
143 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
139 - cd /tmp/mercurial-ci/
144 - cd /tmp/mercurial-ci/
140 - make local PYTHON=$PYTHON
145 - make local PYTHON=$PYTHON
141 - $PYTHON -m pip install --user -U pytype==2021.04.15
146 - $PYTHON -m pip install --user -U pytype==2021.04.15
142 variables:
147 variables:
143 RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t"
148 RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t"
144 HGTEST_SLOWTIMEOUT: "3600"
149 HGTEST_SLOWTIMEOUT: "3600"
145 PYTHON: python3
150 PYTHON: python3
146 TEST_HGMODULEPOLICY: "c"
151 TEST_HGMODULEPOLICY: "c"
147
152
148 # `sh.exe --login` sets a couple of extra environment variables that are defined
153 # `sh.exe --login` sets a couple of extra environment variables that are defined
149 # in the MinGW shell, but switches CWD to /home/$username. The previous value
154 # in the MinGW shell, but switches CWD to /home/$username. The previous value
150 # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running
155 # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running
151 # run-tests.py; it is needed to make run-tests.py generate a `python3` script
156 # run-tests.py; it is needed to make run-tests.py generate a `python3` script
152 # that satisfies the various shebang lines and delegates to `py -3`.
157 # that satisfies the various shebang lines and delegates to `py -3`.
153 .window_runtests_template: &windows_runtests
158 .window_runtests_template: &windows_runtests
154 <<: *all
159 <<: *all
155 when: manual # we currently don't have any Windows runners
160 when: manual # we currently don't have any Windows runners
156 stage: tests
161 stage: tests
157 before_script:
162 before_script:
158 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && ls -1 tests/test-check-*.* > C:/Temp/check-tests.txt'
163 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && ls -1 tests/test-check-*.* > C:/Temp/check-tests.txt'
159 # TODO: find/install cvs, bzr, perforce, gpg, sqlite3
164 # TODO: find/install cvs, bzr, perforce, gpg, sqlite3
160
165
161 script:
166 script:
162 - echo "Entering script section"
167 - echo "Entering script section"
163 - echo "python used, $Env:PYTHON"
168 - echo "python used, $Env:PYTHON"
164 - Invoke-Expression "$Env:PYTHON -V"
169 - Invoke-Expression "$Env:PYTHON -V"
165 - Invoke-Expression "$Env:PYTHON -m black --version"
170 - Invoke-Expression "$Env:PYTHON -m black --version"
166 - echo "$Env:RUNTEST_ARGS"
171 - echo "$Env:RUNTEST_ARGS"
167 - echo "$Env:TMP"
172 - echo "$Env:TMP"
168 - echo "$Env:TEMP"
173 - echo "$Env:TEMP"
169
174
170 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
175 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
171
176
172 windows-py3:
177 windows-py3:
173 <<: *windows_runtests
178 <<: *windows_runtests
174 tags:
179 tags:
175 - windows
180 - windows
176 variables:
181 variables:
177 TEST_HGMODULEPOLICY: "c"
182 TEST_HGMODULEPOLICY: "c"
178 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
183 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
179 PYTHON: py -3
184 PYTHON: py -3
180
185
181 windows-py3-pyox:
186 windows-py3-pyox:
182 <<: *windows_runtests
187 <<: *windows_runtests
183 tags:
188 tags:
184 - windows
189 - windows
185 variables:
190 variables:
186 TEST_HGMODULEPOLICY: "c"
191 TEST_HGMODULEPOLICY: "c"
187 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt --pyoxidized"
192 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt --pyoxidized"
188 PYTHON: py -3
193 PYTHON: py -3
@@ -1,189 +1,198 @@
1 # install-dependencies.ps1 - Install Windows dependencies for building Mercurial
1 # install-dependencies.ps1 - Install Windows dependencies for building Mercurial
2 #
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This script can be used to bootstrap a Mercurial build environment on
8 # This script can be used to bootstrap a Mercurial build environment on
9 # Windows.
9 # Windows.
10 #
10 #
11 # The script makes a lot of assumptions about how things should work.
11 # The script makes a lot of assumptions about how things should work.
12 # For example, the install location of Python is hardcoded to c:\hgdev\*.
12 # For example, the install location of Python is hardcoded to c:\hgdev\*.
13 #
13 #
14 # The script should be executed from a PowerShell with elevated privileges
14 # The script should be executed from a PowerShell with elevated privileges
15 # if you don't want to see a UAC prompt for various installers.
15 # if you don't want to see a UAC prompt for various installers.
16 #
16 #
17 # The script is tested on Windows 10 and Windows Server 2019 (in EC2).
17 # The script is tested on Windows 10 and Windows Server 2019 (in EC2).
18
18
19 $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe"
19 $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe"
20 $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139"
20 $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139"
21
21
22 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe"
22 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe"
23 $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220"
23 $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220"
24 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe"
24 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe"
25 $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987"
25 $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987"
26
26
27 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10.exe"
27 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10.exe"
28 $PYTHON38_x86_SHA256 = "ad07633a1f0cd795f3bf9da33729f662281df196b4567fa795829f3bb38a30ac"
28 $PYTHON38_x86_SHA256 = "ad07633a1f0cd795f3bf9da33729f662281df196b4567fa795829f3bb38a30ac"
29 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
29 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
30 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
30 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
31
31
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5.exe"
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9.exe"
33 $PYTHON39_x86_SHA256 = "505129081a839b699a6ab9064b441ad922ef03767b5dd4241fd0c2166baf64de"
33 $PYTHON39_x86_SHA256 = "6646a5683adf14d35e8c53aab946895bc0f0b825f7acac3a62cc85ee7d0dc71a"
34 $PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5-amd64.exe"
34 $PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9-amd64.exe"
35 $PYTHON39_x64_SHA256 = "84d5243088ba00c11e51905c704dbe041040dfff044f4e1ce5476844ee2e6eac"
35 $PYTHON39_x64_SHA256 = "137d59e5c0b01a8f1bdcba08344402ae658c81c6bf03b6602bd8b4e951ad0714"
36
37 $PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0.exe"
38 $PYTHON310_x86_SHA256 = "ea896eeefb1db9e12fb89ec77a6e28c9fe52b4a162a34c85d9688be2ec2392e8"
39 $PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe"
40 $PYTHON310_x64_SHA256 = "cb580eb7dc55f9198e650f016645023e8b2224cf7d033857d12880b46c5c94ef"
36
41
37 # PIP 19.2.3.
42 # PIP 19.2.3.
38 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
43 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
39 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
44 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
40
45
41 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
46 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
42 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
47 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
43
48
44 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
49 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
45 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
50 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
46
51
47 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.8.1-cp39-cp39-win_amd64.whl"
52 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.8.1-cp39-cp39-win_amd64.whl"
48 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/5c/b5/a5fa664761eef29b6c90eb24cb09ab8fe2c9b4b86af41d42c17476aff29b/$MERCURIAL_WHEEL_FILENAME"
53 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/5c/b5/a5fa664761eef29b6c90eb24cb09ab8fe2c9b4b86af41d42c17476aff29b/$MERCURIAL_WHEEL_FILENAME"
49 $MERCURIAL_WHEEL_SHA256 = "cbf3efa68fd7ebf94691bd00d2c86bbd47ca73620c8faa4f18b6c394bf5f82b0"
54 $MERCURIAL_WHEEL_SHA256 = "cbf3efa68fd7ebf94691bd00d2c86bbd47ca73620c8faa4f18b6c394bf5f82b0"
50
55
51 $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe"
56 $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe"
52 $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72"
57 $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72"
53
58
54 $PYOXIDIZER_URL = "https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.17/PyOxidizer-0.17.0-x64.msi"
59 $PYOXIDIZER_URL = "https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.17/PyOxidizer-0.17.0-x64.msi"
55 $PYOXIDIZER_SHA256 = "85c3bc21a18eb5e2db4dad87cca29accf725c7d59dd364a853ab5099c272024b"
60 $PYOXIDIZER_SHA256 = "85c3bc21a18eb5e2db4dad87cca29accf725c7d59dd364a853ab5099c272024b"
56
61
57 # Writing progress slows down downloads substantially, so disable it.
62 # Writing progress slows down downloads substantially, so disable it.
58 $progressPreference = 'silentlyContinue'
63 $progressPreference = 'silentlyContinue'
59
64
60 function Secure-Download($url, $path, $sha256) {
65 function Secure-Download($url, $path, $sha256) {
61 if (Test-Path -Path $path) {
66 if (Test-Path -Path $path) {
62 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
67 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
63
68
64 if ($hash.Hash -eq $sha256) {
69 if ($hash.Hash -eq $sha256) {
65 Write-Output "SHA256 of $path verified as $sha256"
70 Write-Output "SHA256 of $path verified as $sha256"
66 return
71 return
67 }
72 }
68
73
69 Write-Output "hash mismatch on $path; downloading again"
74 Write-Output "hash mismatch on $path; downloading again"
70 }
75 }
71
76
72 Write-Output "downloading $url to $path"
77 Write-Output "downloading $url to $path"
73 Invoke-WebRequest -Uri $url -OutFile $path
78 Invoke-WebRequest -Uri $url -OutFile $path
74 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
79 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
75
80
76 if ($hash.Hash -ne $sha256) {
81 if ($hash.Hash -ne $sha256) {
77 Remove-Item -Path $path
82 Remove-Item -Path $path
78 throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256"
83 throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256"
79 }
84 }
80 }
85 }
81
86
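The verify-reuse-redownload pattern in Secure-Download is worth noting: an existing file is kept only if its SHA-256 matches, and a fresh download is deleted on mismatch so a later run starts clean. For comparison, the same logic as a stdlib-only Python sketch (names are illustrative):

    import hashlib
    import os
    import urllib.request

    def secure_download(url, path, sha256):
        def digest():
            h = hashlib.sha256()
            with open(path, 'rb') as fh:
                for chunk in iter(lambda: fh.read(1 << 20), b''):
                    h.update(chunk)
            return h.hexdigest()

        # Reuse a previously downloaded file if its hash already matches.
        if os.path.exists(path) and digest() == sha256.lower():
            return
        urllib.request.urlretrieve(url, path)
        if digest() != sha256.lower():
            os.remove(path)
            raise Exception('hash mismatch when downloading %s' % url)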
82 function Invoke-Process($path, $arguments) {
87 function Invoke-Process($path, $arguments) {
83 echo "$path $arguments"
88 echo "$path $arguments"
84
89
85 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
90 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
86
91
87 if ($p.ExitCode -ne 0) {
92 if ($p.ExitCode -ne 0) {
88 throw "process exited non-0: $($p.ExitCode)"
93 throw "process exited non-0: $($p.ExitCode)"
89 }
94 }
90 }
95 }
91
96
92 function Install-Python3($name, $installer, $dest, $pip) {
97 function Install-Python3($name, $installer, $dest, $pip) {
93 Write-Output "installing $name"
98 Write-Output "installing $name"
94
99
95 # We hit this when running the script as part of Simple Systems Manager in
100 # We hit this when running the script as part of Simple Systems Manager in
96 # EC2. The Python 3 installer doesn't seem to like per-user installs
101 # EC2. The Python 3 installer doesn't seem to like per-user installs
97 # when running as the SYSTEM user. So enable global installs if executed in
102 # when running as the SYSTEM user. So enable global installs if executed in
98 # this mode.
103 # this mode.
99 if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") {
104 if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") {
100 Write-Output "running with SYSTEM account; installing for all users"
105 Write-Output "running with SYSTEM account; installing for all users"
101 $allusers = "1"
106 $allusers = "1"
102 }
107 }
103 else {
108 else {
104 $allusers = "0"
109 $allusers = "0"
105 }
110 }
106
111
107 Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0"
112 Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0"
108 Invoke-Process ${dest}\python.exe $pip
113 Invoke-Process ${dest}\python.exe $pip
109 }
114 }
110
115
111 function Install-Rust($prefix) {
116 function Install-Rust($prefix) {
112 Write-Output "installing Rust"
117 Write-Output "installing Rust"
113 $Env:RUSTUP_HOME = "${prefix}\rustup"
118 $Env:RUSTUP_HOME = "${prefix}\rustup"
114 $Env:CARGO_HOME = "${prefix}\cargo"
119 $Env:CARGO_HOME = "${prefix}\cargo"
115
120
116 Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc"
121 Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc"
117 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc"
122 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc"
118 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.52.0"
123 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.52.0"
119 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy"
124 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy"
120 }
125 }
121
126
122 function Install-Dependencies($prefix) {
127 function Install-Dependencies($prefix) {
123 if (!(Test-Path -Path $prefix\assets)) {
128 if (!(Test-Path -Path $prefix\assets)) {
124 New-Item -Path $prefix\assets -ItemType Directory
129 New-Item -Path $prefix\assets -ItemType Directory
125 }
130 }
126
131
127 $pip = "${prefix}\assets\get-pip.py"
132 $pip = "${prefix}\assets\get-pip.py"
128
133
129 Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256
134 Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256
130 Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256
135 Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256
131 Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256
136 Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256
132 Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
137 Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
133 Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256
138 Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256
134 Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256
139 Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256
140 Secure-Download $PYTHON310_x86_URL ${prefix}\assets\python310-x86.exe $PYTHON310_x86_SHA256
141 Secure-Download $PYTHON310_x64_URL ${prefix}\assets\python310-x64.exe $PYTHON310_x64_SHA256
135 Secure-Download $PIP_URL ${pip} $PIP_SHA256
142 Secure-Download $PIP_URL ${pip} $PIP_SHA256
136 Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
143 Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
137 Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
144 Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
138 Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256
145 Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256
139 Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256
146 Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256
140 Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256
147 Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256
141 Secure-Download $PYOXIDIZER_URL ${prefix}\assets\PyOxidizer.msi $PYOXIDIZER_SHA256
148 Secure-Download $PYOXIDIZER_URL ${prefix}\assets\PyOxidizer.msi $PYOXIDIZER_SHA256
142
149
143 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
150 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
144 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
151 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
145 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
152 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
146 # Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
153 # Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
147 Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
154 Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
148 Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
155 Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
156 Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
157 Install-Python3 "Python 3.10 64-bit" ${prefix}\assets\python310-x64.exe ${prefix}\python310-x64 ${pip}
149
158
150 Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
159 Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
151 Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
160 Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
152
161
153 Write-Output "installing PyOxidizer"
162 Write-Output "installing PyOxidizer"
154 Invoke-Process msiexec.exe "/i ${prefix}\assets\PyOxidizer.msi /l* ${prefix}\assets\PyOxidizer.log /quiet"
163 Invoke-Process msiexec.exe "/i ${prefix}\assets\PyOxidizer.msi /l* ${prefix}\assets\PyOxidizer.log /quiet"
155
164
156 Install-Rust ${prefix}
165 Install-Rust ${prefix}
157
166
158 Write-Output "installing Inno Setup"
167 Write-Output "installing Inno Setup"
159 Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES"
168 Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES"
160
169
161 Write-Output "extracting MinGW base archive"
170 Write-Output "extracting MinGW base archive"
162 Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force
171 Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force
163
172
164 Write-Output "updating MinGW package catalogs"
173 Write-Output "updating MinGW package catalogs"
165 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update"
174 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update"
166
175
167 Write-Output "installing MinGW packages"
176 Write-Output "installing MinGW packages"
168 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip"
177 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip"
169
178
170 # Construct a virtualenv useful for bootstrapping. It conveniently contains a
179 # Construct a virtualenv useful for bootstrapping. It conveniently contains a
171 # Mercurial install.
180 # Mercurial install.
172 Write-Output "creating bootstrap virtualenv with Mercurial"
181 Write-Output "creating bootstrap virtualenv with Mercurial"
173 Invoke-Process "$prefix\python39-x64\python.exe" "-m venv ${prefix}\venv-bootstrap"
182 Invoke-Process "$prefix\python39-x64\python.exe" "-m venv ${prefix}\venv-bootstrap"
174 Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}"
183 Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}"
175 }
184 }
176
185
177 function Clone-Mercurial-Repo($prefix, $repo_url, $dest) {
186 function Clone-Mercurial-Repo($prefix, $repo_url, $dest) {
178 Write-Output "cloning $repo_url to $dest"
187 Write-Output "cloning $repo_url to $dest"
179 # TODO Figure out why CA verification isn't working in EC2 and remove
188 # TODO Figure out why CA verification isn't working in EC2 and remove
180 # --insecure.
189 # --insecure.
181 Invoke-Process "${prefix}\venv-bootstrap\Scripts\python.exe" "${prefix}\venv-bootstrap\Scripts\hg clone --insecure $repo_url $dest"
190 Invoke-Process "${prefix}\venv-bootstrap\Scripts\python.exe" "${prefix}\venv-bootstrap\Scripts\hg clone --insecure $repo_url $dest"
182
191
183 # Mark repo as non-publishing by default for convenience.
192 # Mark repo as non-publishing by default for convenience.
184 Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false"
193 Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false"
185 }
194 }
186
195
187 $prefix = "c:\hgdev"
196 $prefix = "c:\hgdev"
188 Install-Dependencies $prefix
197 Install-Dependencies $prefix
189 Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src
198 Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src
@@ -1,301 +1,328 @@
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile with python 3.7
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py3.txt contrib/packaging/requirements-windows.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py3.txt contrib/packaging/requirements-windows.txt.in
6 #
6 #
7 atomicwrites==1.4.0 \
7 atomicwrites==1.4.0 \
8 --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \
8 --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \
9 --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a \
9 --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a
10 # via pytest
10 # via pytest
11 attrs==21.2.0 \
11 attrs==21.2.0 \
12 --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
12 --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
13 --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb \
13 --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb
14 # via pytest
14 # via pytest
15 cached-property==1.5.2 \
15 cached-property==1.5.2 \
16 --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
16 --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
17 --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 \
17 --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0
18 # via pygit2
18 # via pygit2
19 certifi==2021.5.30 \
19 certifi==2021.5.30 \
20 --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \
20 --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \
21 --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 \
21 --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8
22 # via dulwich
22 # via dulwich
23 cffi==1.14.4 \
23 cffi==1.15.0 \
24 --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \
24 --hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \
25 --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \
25 --hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \
26 --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \
26 --hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \
27 --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \
27 --hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \
28 --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \
28 --hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \
29 --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \
29 --hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \
30 --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \
30 --hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \
31 --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \
31 --hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \
32 --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \
32 --hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \
33 --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \
33 --hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \
34 --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \
34 --hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \
35 --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \
35 --hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \
36 --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \
36 --hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \
37 --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \
37 --hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \
38 --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \
38 --hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \
39 --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \
39 --hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \
40 --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \
40 --hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \
41 --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \
41 --hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \
42 --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \
42 --hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \
43 --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \
43 --hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \
44 --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \
44 --hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \
45 --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \
45 --hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \
46 --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \
46 --hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \
47 --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \
47 --hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \
48 --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \
48 --hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \
49 --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \
49 --hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \
50 --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \
50 --hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \
51 --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \
51 --hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \
52 --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \
52 --hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \
53 --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \
53 --hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \
54 --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \
54 --hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \
55 --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \
55 --hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \
56 --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \
56 --hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \
57 --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \
57 --hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \
58 --hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \
59 --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \
60 --hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \
61 --hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \
62 --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \
63 --hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \
64 --hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \
65 --hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \
66 --hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \
67 --hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \
68 --hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \
69 --hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \
70 --hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \
71 --hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \
72 --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \
73 --hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796
58 # via pygit2
74 # via pygit2
59 colorama==0.4.4 \
75 colorama==0.4.4 \
60 --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \
76 --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \
61 --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 \
77 --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2
62 # via pytest
78 # via pytest
63 docutils==0.16 \
79 docutils==0.16 \
64 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
80 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
65 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
81 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
66 # via -r contrib/packaging/requirements-windows.txt.in
82 # via -r contrib/packaging/requirements-windows.txt.in
67 dulwich==0.20.6 ; python_version >= "3" \
83 dulwich==0.20.6 ; python_version >= "3" \
68 --hash=sha256:1ccd55e38fa9f169290f93e027ab4508202f5bdd6ef534facac4edd3f6903f0d \
84 --hash=sha256:1ccd55e38fa9f169290f93e027ab4508202f5bdd6ef534facac4edd3f6903f0d \
69 --hash=sha256:2452a0379cc7bbbd7ab893ec104d18039f1ea98b0d6be6bca5646e5cf29e0ae9 \
85 --hash=sha256:2452a0379cc7bbbd7ab893ec104d18039f1ea98b0d6be6bca5646e5cf29e0ae9 \
70 --hash=sha256:2f4aebc54ed2d37dcee737024421452375570a422eb682232e676aa7ebc9cb4b \
86 --hash=sha256:2f4aebc54ed2d37dcee737024421452375570a422eb682232e676aa7ebc9cb4b \
71 --hash=sha256:304f52b10c49c3a6ddfbd73e2e93d8e979350225cfba9688e51110e74fa2f718 \
87 --hash=sha256:304f52b10c49c3a6ddfbd73e2e93d8e979350225cfba9688e51110e74fa2f718 \
72 --hash=sha256:49e747c72d9099e873bf6196260346d5996c3f28af788294d47a8accdc524de7 \
88 --hash=sha256:49e747c72d9099e873bf6196260346d5996c3f28af788294d47a8accdc524de7 \
73 --hash=sha256:4fee359928c59b53af153a582a7ed7595259a5a825df400301a29e17fd78dfd3 \
89 --hash=sha256:4fee359928c59b53af153a582a7ed7595259a5a825df400301a29e17fd78dfd3 \
74 --hash=sha256:50ef300a9fa4efd9f85009c2bd8b515266ec1529400f8834f85c04fa9f09b2c0 \
90 --hash=sha256:50ef300a9fa4efd9f85009c2bd8b515266ec1529400f8834f85c04fa9f09b2c0 \
75 --hash=sha256:5348310f21b2a23847342ce464461499b6652483fa42de03714d0f6421a99698 \
91 --hash=sha256:5348310f21b2a23847342ce464461499b6652483fa42de03714d0f6421a99698 \
76 --hash=sha256:7e7b5dea5178b6493fdb83adccbe81de9ddff55f79880185ed594c0e3a97209b \
92 --hash=sha256:7e7b5dea5178b6493fdb83adccbe81de9ddff55f79880185ed594c0e3a97209b \
77 --hash=sha256:8f7a7f973be2beedfb10dd8d3eb6bdf9ec466c72ad555704897cbd6357fe5021 \
93 --hash=sha256:8f7a7f973be2beedfb10dd8d3eb6bdf9ec466c72ad555704897cbd6357fe5021 \
78 --hash=sha256:bea6e6caffc6c73bfd1647714c5715ab96ac49deb8beb8b67511529afa25685a \
94 --hash=sha256:bea6e6caffc6c73bfd1647714c5715ab96ac49deb8beb8b67511529afa25685a \
79 --hash=sha256:e5871b86a079e9e290f52ab14559cea1b694a0b8ed2b9ebb898f6ced7f14a406 \
95 --hash=sha256:e5871b86a079e9e290f52ab14559cea1b694a0b8ed2b9ebb898f6ced7f14a406 \
80 --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b \
96 --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b
81 # via -r contrib/packaging/requirements-windows.txt.in
97 # via -r contrib/packaging/requirements-windows.txt.in
82 fuzzywuzzy==0.18.0 \
98 fuzzywuzzy==0.18.0 \
83 --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \
99 --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8
84 # via -r contrib/packaging/requirements-windows.txt.in
100 # via -r contrib/packaging/requirements-windows.txt.in
85 idna==3.2 \
101 idna==3.2 \
86 --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \
102 --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \
87 --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 \
103 --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3
88 # via yarl
104 # via yarl
89 importlib-metadata==3.1.0 \
105 importlib-metadata==3.1.0 \
90 --hash=sha256:590690d61efdd716ff82c39ca9a9d4209252adfe288a4b5721181050acbd4175 \
106 --hash=sha256:590690d61efdd716ff82c39ca9a9d4209252adfe288a4b5721181050acbd4175 \
91 --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099 \
107 --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099
92 # via keyring, pluggy, pytest
108 # via
109 # keyring
110 # pluggy
111 # pytest
93 iniconfig==1.1.1 \
112 iniconfig==1.1.1 \
94 --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
113 --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
95 --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 \
114 --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32
96 # via pytest
115 # via pytest
97 keyring==21.4.0 \
116 keyring==21.4.0 \
98 --hash=sha256:4e34ea2fdec90c1c43d6610b5a5fafa1b9097db1802948e90caf5763974b8f8d \
117 --hash=sha256:4e34ea2fdec90c1c43d6610b5a5fafa1b9097db1802948e90caf5763974b8f8d \
99 --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466 \
118 --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466
100 # via -r contrib/packaging/requirements-windows.txt.in
119 # via -r contrib/packaging/requirements-windows.txt.in
101 multidict==5.1.0 \
120 multidict==5.1.0 \
102 --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
121 --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
103 --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \
122 --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \
104 --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \
123 --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \
105 --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \
124 --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \
106 --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \
125 --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \
107 --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \
126 --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \
108 --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \
127 --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \
109 --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \
128 --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \
110 --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \
129 --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \
111 --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \
130 --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \
112 --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \
131 --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \
113 --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \
132 --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \
114 --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \
133 --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \
115 --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \
134 --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \
116 --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \
135 --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \
117 --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \
136 --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \
118 --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \
137 --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \
119 --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \
138 --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \
120 --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \
139 --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \
121 --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \
140 --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \
122 --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \
141 --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \
123 --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \
142 --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \
124 --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \
143 --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \
125 --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \
144 --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \
126 --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \
145 --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \
127 --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \
146 --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \
128 --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \
147 --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \
129 --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \
148 --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \
130 --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \
149 --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \
131 --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \
150 --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \
132 --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \
151 --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \
133 --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \
152 --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \
134 --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \
153 --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \
135 --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
154 --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
136 --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
155 --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
137 --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
156 --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
138 --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 \
157 --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80
139 # via yarl
158 # via yarl
140 packaging==21.0 \
159 packaging==21.0 \
141 --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \
160 --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \
142 --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 \
161 --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14
143 # via pytest
162 # via pytest
144 pluggy==0.13.1 \
163 pluggy==0.13.1 \
145 --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \
164 --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \
146 --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \
165 --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d
147 # via pytest
166 # via pytest
148 py==1.10.0 \
167 py==1.10.0 \
149 --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \
168 --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \
150 --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a \
169 --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a
151 # via pytest
170 # via pytest
152 pycparser==2.20 \
171 pycparser==2.21 \
153 --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
172 --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
154 --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \
173 --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
155 # via cffi
174 # via cffi
156 pygit2==1.4.0 ; python_version >= "3" \
175 pygit2==1.7.1 ; python_version >= "3" \
157 --hash=sha256:0d298098e286eeda000e49ca7e1b41f87300e10dd8b9d06b32b008bd61f50b83 \
176 --hash=sha256:2c9e95efb86c0b32cc07c26be3d179e851ca4a7899c47fef63c4203963144f5e \
158 --hash=sha256:0ee135eb2cd8b07ce1374f3596cc5c3213472d6389bad6a4c5d87d8e267e93e9 \
177 --hash=sha256:3ddacbf461652d3d4900382f821d9fbd5ae2dedecd7862b5245842419ad0ccba \
159 --hash=sha256:32eb863d6651d4890ced318505ea8dc229bd9637deaf29c898de1ab574d727a0 \
178 --hash=sha256:4cb0414df6089d0072ebe93ff2f34730737172dd5f0e72289567d06a6caf09c0 \
160 --hash=sha256:37d6d7d6d7804c42a0fe23425c72e38093488525092fc5e51a05684e63503ce7 \
179 --hash=sha256:56e960dc74f4582bfa3ca17a1a9d542732fc93b5cf8f82574c235d06b2d61eae \
161 --hash=sha256:41204b6f3406d9f53147710f3cc485d77181ba67f57c34d36b7c86de1c14a18c \
180 --hash=sha256:6b17ab922c2a2d99b30ab9222472b07732bf7261d9f9655a4ea23b4c700049d8 \
162 --hash=sha256:818c91b582109d90580c5da74af783738838353f15eb12eeb734d80a974b05a3 \
181 --hash=sha256:73a7b471f22cb59e8729016de1f447c472b3b2c1cc2b622194e5e3b48a7f5776 \
163 --hash=sha256:8306a302487dac67df7af6a064bb37e8a8eb4138958f9560ff49ff162e185dab \
182 --hash=sha256:761a8850e33822796c1c24d411d5cc2460c04e1a74b04ae8560efd3596bbd6bd \
164 --hash=sha256:9c2f2d9ef59513007b66f6534b000792b614de3faf60313a0a68f6b8571aea85 \
183 --hash=sha256:7c467e81158f5827b3bca6362e5cc9b92857eff9de65034d338c1f18524b09be \
165 --hash=sha256:9c8d5881eb709e2e2e13000b507a131bd5fb91a879581030088d0ddffbcd19af \
184 --hash=sha256:7c56e10592e62610a19bd3e2a633aafe3488c57b906c7c2fde0299937f0f0b2f \
166 --hash=sha256:b422e417739def0a136a6355723dfe8a5ffc83db5098076f28a14f1d139779c1 \
185 --hash=sha256:7cc2a8e29cc9598310a78cf58b70d9331277cf374802be8f97d97c4a9e5d8387 \
167 --hash=sha256:cbeb38ab1df9b5d8896548a11e63aae8a064763ab5f1eabe4475e6b8a78ee1c8 \
186 --hash=sha256:812670f7994f31778e873a9eced29d2bbfa91674e8be0ab1e974c8a4bda9cbab \
168 --hash=sha256:cf00481ddf053e549a6edd0216bdc267b292d261eae02a67bb3737de920cbf88 \
187 --hash=sha256:8cdb0b1d6c3d24b44f340fed143b16e64ba23fe2a449f1a5db87aaf9339a9dbe \
169 --hash=sha256:d0d889144e9487d926fecea947c3f39ce5f477e521d7d467d2e66907e4cd657d \
188 --hash=sha256:91b77a305d8d18b649396e66e832d654cd593a3d29b5728f753f254a04533812 \
170 --hash=sha256:ddb7a1f6d38063e8724abfa1cfdfb0f9b25014b8bca0546274b7a84b873a3888 \
189 --hash=sha256:a75bcde32238c77eb0cf7d9698a5aa899408d7ad999a5920a29a7c4b80fdeaa7 \
171 --hash=sha256:e9037a7d810750fe23c9f5641ef14a0af2525ff03e14752cd4f73e1870ecfcb0 \
190 --hash=sha256:b060240cf3038e7a0706bbfc5436dd03b8d5ac797ac1d512b613f4d04b974c80 \
172 --hash=sha256:ec5c0365a9bdfcac1609d20868507b28685ec5ea7cc3a2c903c9b62ef2e0bbc0 \
191 --hash=sha256:cdfa61c0428a8182e5a6a1161c017b824cd511574f080a40b10d6413774eb0ca \
173 --hash=sha256:fdd8ba30cda277290e000322f505132f590cf89bd7d31829b45a3cb57447ec32 \
192 --hash=sha256:d7faa29558436decc2e78110f38d6677eb366b683ba5cdc2803d47195711165d \
193 --hash=sha256:d831825ad9c3b3c28e6b3ef8a2401ad2d3fd4db5455427ff27175a7e254e2592 \
194 --hash=sha256:df4c477bdfac85d32a1e3180282cd829a0980aa69be9bd0f7cbd4db1778ca72b \
195 --hash=sha256:eced3529bafcaaac015d08dfaa743b3cbad37fcd5b13ae9d280b8b7f716ec5ce \
196 --hash=sha256:fec17e2da668e6bb192d777417aad9c7ca924a166d0a0b9a81a11e00362b1bc7
174 # via -r contrib/packaging/requirements-windows.txt.in
197 # via -r contrib/packaging/requirements-windows.txt.in
175 pygments==2.7.1 \
198 pygments==2.7.1 \
176 --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
199 --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
177 --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \
200 --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7
178 # via -r contrib/packaging/requirements-windows.txt.in
201 # via -r contrib/packaging/requirements-windows.txt.in
179 pyparsing==2.4.7 \
202 pyparsing==2.4.7 \
180 --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
203 --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
181 --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
204 --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
182 # via packaging
205 # via packaging
183 pytest-vcr==1.0.2 \
184 --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896 \
185 # via -r contrib/packaging/requirements-windows.txt.in
186 pytest==6.2.4 \
206 pytest==6.2.4 \
187 --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \
207 --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \
188 --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 \
208 --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890
189 # via pytest-vcr
209 # via pytest-vcr
210 pytest-vcr==1.0.2 \
211 --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896
212 # via -r contrib/packaging/requirements-windows.txt.in
190 pywin32-ctypes==0.2.0 \
213 pywin32-ctypes==0.2.0 \
191 --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
214 --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
192 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \
215 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
193 # via -r contrib/packaging/requirements-windows.txt.in, keyring
216 # via
217 # -r contrib/packaging/requirements-windows.txt.in
218 # keyring
194 pyyaml==5.4.1 \
219 pyyaml==5.4.1 \
195 --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
220 --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
196 --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
221 --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
197 --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
222 --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
198 --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
223 --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
199 --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
224 --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
200 --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
225 --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
201 --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
226 --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
202 --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
227 --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
203 --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
228 --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
204 --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
229 --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
205 --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \
230 --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \
206 --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
231 --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
207 --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
232 --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
208 --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
233 --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
209 --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
234 --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
210 --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
235 --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
211 --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
236 --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
212 --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
237 --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
213 --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
238 --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
214 --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
239 --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
215 --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
240 --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
216 --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
241 --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
217 --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
242 --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
218 --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
243 --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
219 --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
244 --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
220 --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
245 --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
221 --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
246 --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
222 --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
247 --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
223 --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 \
248 --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
224 # via vcrpy
249 # via vcrpy
225 six==1.16.0 \
250 six==1.16.0 \
226 --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
251 --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
227 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 \
252 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
228 # via vcrpy
253 # via vcrpy
229 toml==0.10.2 \
254 toml==0.10.2 \
230 --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
255 --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
231 --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f \
256 --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
232 # via pytest
257 # via pytest
233 typing-extensions==3.10.0.0 \
258 typing-extensions==3.10.0.0 \
234 --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
259 --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
235 --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \
260 --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \
236 --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 \
261 --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84
237 # via yarl
262 # via yarl
238 urllib3==1.25.11 \
263 urllib3==1.25.11 \
239 --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
264 --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
240 --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
265 --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e
241 # via dulwich
266 # via dulwich
242 vcrpy==4.1.1 \
267 vcrpy==4.1.1 \
243 --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \
268 --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \
244 --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 \
269 --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599
245 # via pytest-vcr
270 # via pytest-vcr
246 windows-curses==2.2.0 \
271 windows-curses==2.3.0 \
247 --hash=sha256:1452d771ec6f9b3fef037da2b169196a9a12be4e86a6c27dd579adac70c42028 \
272 --hash=sha256:170c0d941c2e0cdf864e7f0441c1bdf0709232bf4aa7ce7f54d90fc76a4c0504 \
248 --hash=sha256:267544e4f60c09af6505e50a69d7f01d7f8a281cf4bd4fc7efc3b32b9a4ef64e \
273 --hash=sha256:4d5fb991d1b90a41c2332f02241a1f84c8a1e6bc8f6e0d26f532d0da7a9f7b51 \
249 --hash=sha256:389228a3df556102e72450f599283094168aa82eee189f501ad9f131a0fc92e1 \
274 --hash=sha256:7a35eda4cb120b9e1a5ae795f3bc06c55b92c9d391baba6be1903285a05f3551 \
250 --hash=sha256:84336fe470fa07288daec5c684dec74c0766fec6b3511ccedb4c494804acfbb7 \
275 --hash=sha256:935be95cfdb9213f6f5d3d5bcd489960e3a8fbc9b574e7b2e8a3a3cc46efff49 \
251 --hash=sha256:9aa6ff60be76f5de696dc6dbf7897e3b1e6abcf4c0f741e9a0ee22cd6ef382f8 \
276 --hash=sha256:a3a63a0597729e10f923724c2cf972a23ea677b400d2387dee1d668cf7116177 \
252 --hash=sha256:c4a8ce00e82635f06648cc40d99f470be4e3ffeb84f9f7ae9d6a4f68ec6361e7 \
277 --hash=sha256:c860f596d28377e47f322b7382be4d3573fd76d1292234996bb7f72e0bc0ed0d \
253 --hash=sha256:c5cd032bc7d0f03224ab55c925059d98e81795098d59bbd10f7d05c7ea9677ce \
278 --hash=sha256:cc5fa913780d60f4a40824d374a4f8ca45b4e205546e83a2d85147315a57457e \
254 --hash=sha256:fc0be372fe6da3c39d7093154ce029115a927bf287f34b4c615e2b3f8c23dfaa \
279 --hash=sha256:d5cde8ec6d582aa77af791eca54f60858339fb3f391945f9cad11b1ab71062e3 \
280 --hash=sha256:e913dc121446d92b33fe4f5bcca26d3a34e4ad19f2af160370d57c3d1e93b4e1 \
281 --hash=sha256:fbc2131cec57e422c6660e6cdb3420aff5be5169b8e45bb7c471f884b0590a2b
255 # via -r contrib/packaging/requirements-windows.txt.in
282 # via -r contrib/packaging/requirements-windows.txt.in
256 wrapt==1.12.1 \
283 wrapt==1.12.1 \
257 --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 \
284 --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7
258 # via vcrpy
285 # via vcrpy
259 yarl==1.6.3 \
286 yarl==1.6.3 \
260 --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
287 --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
261 --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \
288 --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \
262 --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \
289 --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \
263 --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \
290 --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \
264 --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \
291 --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \
265 --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \
292 --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \
266 --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \
293 --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \
267 --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \
294 --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \
268 --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \
295 --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \
269 --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \
296 --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \
270 --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \
297 --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \
271 --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \
298 --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \
272 --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \
299 --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \
273 --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \
300 --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \
274 --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \
301 --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \
275 --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \
302 --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \
276 --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \
303 --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \
277 --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \
304 --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \
278 --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \
305 --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \
279 --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \
306 --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \
280 --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \
307 --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \
281 --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \
308 --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \
282 --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \
309 --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \
283 --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \
310 --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \
284 --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \
311 --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \
285 --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \
312 --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \
286 --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \
313 --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \
287 --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \
314 --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \
288 --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \
315 --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \
289 --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \
316 --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \
290 --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \
317 --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \
291 --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \
318 --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \
292 --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \
319 --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \
293 --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
320 --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
294 --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
321 --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
295 --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
322 --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
296 --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 \
323 --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71
297 # via vcrpy
324 # via vcrpy
298 zipp==3.4.0 \
325 zipp==3.4.0 \
299 --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \
326 --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \
300 --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb \
327 --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb
301 # via importlib-metadata
328 # via importlib-metadata
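
Because every requirement above carries sha256 pins, installs from these files are meant to run in pip's hash-checking mode; a minimal sketch (standard pip flag, not part of this diff):

    # refuse to install anything whose archive does not match a pinned hash
    python -m pip install --require-hashes -r contrib/packaging/requirements-windows.txt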
@@ -1,49 +1,49 @@
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile with python 3.7
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
6 #
6 #
7 docutils==0.16 \
7 docutils==0.16 \
8 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
8 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
9 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
9 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
10 # via -r contrib/packaging/requirements.txt.in
10 # via -r contrib/packaging/requirements.txt.in
11 jinja2==2.11.2 \
11 jinja2==2.11.2 \
12 --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
12 --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
13 --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
13 --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035
14 # via -r contrib/packaging/requirements.txt.in
14 # via -r contrib/packaging/requirements.txt.in
15 markupsafe==1.1.1 \
15 markupsafe==1.1.1 \
16 --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
16 --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
17 --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
17 --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
18 --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
18 --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
19 --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
19 --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
20 --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
20 --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
21 --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
21 --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
22 --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
22 --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
23 --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
23 --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
24 --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
24 --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
25 --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
25 --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
26 --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
26 --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
27 --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
27 --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
28 --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
28 --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
29 --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
29 --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
30 --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
30 --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
31 --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
31 --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
32 --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
32 --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
33 --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
33 --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
34 --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
34 --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
35 --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
35 --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
36 --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
36 --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
37 --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
37 --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
38 --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
38 --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
39 --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
39 --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
40 --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
40 --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
41 --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
41 --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
42 --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
42 --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
43 --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
43 --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
44 --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
44 --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
45 --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
45 --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
46 --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
46 --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
47 --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
47 --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
48 --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
48 --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be
49 # via jinja2
49 # via jinja2
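
Neither pinned file is edited by hand: the header above records the exact pip-compile invocation for requirements.txt, and the windows file presumably follows the same pattern against its own .in input. A regeneration sketch:

    # re-resolve and re-pin after changing the *.txt.in inputs
    pip-compile --generate-hashes \
        --output-file=contrib/packaging/requirements.txt \
        contrib/packaging/requirements.txt.in
    pip-compile --generate-hashes \
        --output-file=contrib/packaging/requirements-windows.txt \
        contrib/packaging/requirements-windows.txt.in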
@@ -1,41 +1,47 @@
 #!/bin/bash
 set -eu
 
+if [[ "$PHABRICATOR_TOKEN" == "NO-PHAB" ]]; then
+    echo 'Skipping Phabricator Step' >&2
+    exit 0
+fi
+
 revision_in_stack=`hg log \
     --rev '.#stack and ::. and topic()' \
     -T '\nONE-REV\n' \
     | grep 'ONE-REV' | wc -l`
 revision_on_phab=`hg log \
     --rev '.#stack and ::. and topic() and desc("re:\nDifferential Revision: [^\n]+D\d+$")'\
     -T '\nONE-REV\n' \
     | grep 'ONE-REV' | wc -l`
 
 if [[ $revision_in_stack -eq 0 ]]; then
     echo "stack is empty" >&2
     exit 0
 fi
 
 if [[ $revision_on_phab -eq 0 ]]; then
     echo "no tracked diff in this stack" >&2
     exit 0
 fi
 
 if [[ $revision_on_phab -lt $revision_in_stack ]]; then
     echo "not all stack changesets (${revision_in_stack}) have matching Phabricator Diff (${revision_on_phab})" >&2
     exit 2
 fi
 
 if [[ "$PHABRICATOR_TOKEN" == "" ]]; then
     echo 'missing $PHABRICATOR_TOKEN variable' >&2
+    echo '(use PHABRICATOR_TOKEN="NO-PHAB" to disable this step)' >&2
     exit 2
 fi
 
 hg \
     --config extensions.phabricator= \
     --config phabricator.url=https://phab.mercurial-scm.org/ \
     --config phabricator.callsign=HG \
     --config auth.phabricator.schemes=https \
     --config auth.phabricator.prefix=phab.mercurial-scm.org \
     --config auth.phabricator.phabtoken=$PHABRICATOR_TOKEN \
     phabsend --rev '.#stack and ::. and topic()' \
     "$@"
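
The new guard makes $PHABRICATOR_TOKEN a three-way switch: empty fails with a hint, the literal "NO-PHAB" skips the step cleanly, and any other value is passed through to phabsend as the Conduit token. A usage sketch (the script's path is truncated out of this view, so the name below is hypothetical):

    # e.g. in a fork's CI where no Phabricator account exists
    # (hypothetical path; the real script name is truncated from this diff)
    PHABRICATOR_TOKEN="NO-PHAB" ./contrib/ci/phab-send-stack.sh

    # normal run: submit every changeset of the current topic stack
    PHABRICATOR_TOKEN="cli-exampletoken" ./contrib/ci/phab-send-stack.sh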
@@ -1,102 +1,141 @@
 #!/usr/bin/env python3
 from __future__ import absolute_import
 
 import getopt
 import sys
 
 import hgdemandimport
 
 hgdemandimport.enable()
 
 from mercurial.i18n import _
 from mercurial import (
     context,
     error,
     fancyopts,
-    pycompat,
     simplemerge,
     ui as uimod,
+    util,
 )
 from mercurial.utils import procutil, stringutil
 
 options = [
     (b'L', b'label', [], _(b'labels to use on conflict markers')),
     (b'a', b'text', None, _(b'treat all files as text')),
     (b'p', b'print', None, _(b'print results instead of overwriting LOCAL')),
     (b'', b'no-minimal', None, _(b'no effect (DEPRECATED)')),
     (b'h', b'help', None, _(b'display help and exit')),
     (b'q', b'quiet', None, _(b'suppress output')),
 ]
 
 usage = _(
     b'''simplemerge [OPTS] LOCAL BASE OTHER
 
 Simple three-way file merge utility with a minimal feature set.
 
 Apply to LOCAL the changes necessary to go from BASE to OTHER.
 
 By default, LOCAL is overwritten with the results of this operation.
 '''
 )
 
 
 class ParseError(Exception):
     """Exception raised on errors in parsing the command line."""
 
 
 def showhelp():
     procutil.stdout.write(usage)
     procutil.stdout.write(b'\noptions:\n')
 
     out_opts = []
     for shortopt, longopt, default, desc in options:
         out_opts.append(
             (
                 b'%2s%s'
                 % (
                     shortopt and b'-%s' % shortopt,
                     longopt and b' --%s' % longopt,
                 ),
                 b'%s' % desc,
             )
         )
     opts_len = max([len(opt[0]) for opt in out_opts])
     for first, second in out_opts:
         procutil.stdout.write(b' %-*s %s\n' % (opts_len, first, second))
 
 
+def _verifytext(input, ui, quiet=False, allow_binary=False):
+    """verifies that text is non-binary (unless opts[text] is passed,
+    then we just warn)"""
+    if stringutil.binary(input.text()):
+        msg = _(b"%s looks like a binary file.") % input.fctx.path()
+        if not quiet:
+            ui.warn(_(b'warning: %s\n') % msg)
+        if not allow_binary:
+            sys.exit(1)
+
+
 try:
     for fp in (sys.stdin, procutil.stdout, sys.stderr):
         procutil.setbinary(fp)
 
     opts = {}
     try:
         bargv = [a.encode('utf8') for a in sys.argv[1:]]
         args = fancyopts.fancyopts(bargv, options, opts)
     except getopt.GetoptError as e:
         raise ParseError(e)
     if opts[b'help']:
         showhelp()
         sys.exit(0)
     if len(args) != 3:
         raise ParseError(_(b'wrong number of arguments').decode('utf8'))
+    mode = b'merge'
+    if len(opts[b'label']) > 2:
+        mode = b'merge3'
     local, base, other = args
-    sys.exit(
-        simplemerge.simplemerge(
-            uimod.ui.load(),
-            context.arbitraryfilectx(local),
-            context.arbitraryfilectx(base),
-            context.arbitraryfilectx(other),
-            **pycompat.strkwargs(opts)
-        )
-    )
+    overrides = opts[b'label']
+    if len(overrides) > 3:
+        raise error.InputError(b'can only specify three labels.')
+    labels = [local, other, base]
+    labels[: len(overrides)] = overrides
+    local_input = simplemerge.MergeInput(
+        context.arbitraryfilectx(local), labels[0]
+    )
+    other_input = simplemerge.MergeInput(
+        context.arbitraryfilectx(other), labels[1]
+    )
+    base_input = simplemerge.MergeInput(
+        context.arbitraryfilectx(base), labels[2]
+    )
+
+    quiet = opts.get(b'quiet')
+    allow_binary = opts.get(b'text')
+    ui = uimod.ui.load()
+    _verifytext(local_input, ui, quiet=quiet, allow_binary=allow_binary)
+    _verifytext(base_input, ui, quiet=quiet, allow_binary=allow_binary)
+    _verifytext(other_input, ui, quiet=quiet, allow_binary=allow_binary)
+
+    merged_text, conflicts = simplemerge.simplemerge(
+        local_input,
+        base_input,
+        other_input,
+        mode,
+        allow_binary=allow_binary,
+    )
+    if opts.get(b'print'):
+        ui.fout.write(merged_text)
+    else:
+        util.writefile(local, merged_text)
+    sys.exit(1 if conflicts else 0)
 except ParseError as e:
     e = stringutil.forcebytestr(e)
     procutil.stdout.write(b"%s: %s\n" % (sys.argv[0].encode('utf8'), e))
     showhelp()
     sys.exit(1)
 except error.Abort as e:
     procutil.stderr.write(b"abort: %s\n" % e)
     sys.exit(255)
 except KeyboardInterrupt:
     sys.exit(255)
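
After the rewrite the behaviour is observable from the command line: up to three -L labels are accepted (a third one switches to merge3-style conflict markers), -p prints the merge result instead of overwriting LOCAL, and the exit status is 1 exactly when conflicts remain. A sketch, assuming the script sits at contrib/simplemerge as in a Mercurial checkout:

    # path assumed from a typical Mercurial checkout
    python3 contrib/simplemerge -p \
        -L working -L incoming -L ancestor \
        local.txt base.txt other.txt
    echo "conflicts: $?"   # 1 if conflict markers were emitted, 0 otherwise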
@@ -1,232 +1,241 b''
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track and
12 The events that get logged can be configured via the blackbox.track and
13 blackbox.ignore config keys.
13 blackbox.ignore config keys.
14
14
15 Examples::
15 Examples::
16
16
17 [blackbox]
17 [blackbox]
18 track = *
18 track = *
19 ignore = pythonhook
19 ignore = pythonhook
20 # dirty is *EXPENSIVE* (slow);
20 # dirty is *EXPENSIVE* (slow);
21 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
21 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
22 dirty = True
22 dirty = True
23 # record the source of log messages
23 # record the source of log messages
24 logsource = True
24 logsource = True
25
25
26 [blackbox]
26 [blackbox]
27 track = command, commandfinish, commandexception, exthook, pythonhook
27 track = command, commandfinish, commandexception, exthook, pythonhook
28
28
29 [blackbox]
29 [blackbox]
30 track = incoming
30 track = incoming
31
31
32 [blackbox]
32 [blackbox]
33 # limit the size of a log file
33 # limit the size of a log file
34 maxsize = 1.5 MB
34 maxsize = 1.5 MB
35 # rotate up to N log files when the current one gets too big
35 # rotate up to N log files when the current one gets too big
36 maxfiles = 3
36 maxfiles = 3
37
37
38 [blackbox]
38 [blackbox]
39 # Include nanoseconds in log entries with %f (see Python function
39 # Include microseconds in log entries with %f (see Python function
40 # datetime.datetime.strftime)
40 # datetime.datetime.strftime)
41 date-format = %Y-%m-%d @ %H:%M:%S.%f
41 date-format = %Y-%m-%d @ %H:%M:%S.%f
42
42
43 """
43 """
44
44
45 from __future__ import absolute_import
45 from __future__ import absolute_import
46
46
47 import re
47 import re
48
48
49 from mercurial.i18n import _
49 from mercurial.i18n import _
50 from mercurial.node import hex
50 from mercurial.node import hex
51
51
52 from mercurial import (
52 from mercurial import (
53 encoding,
53 encoding,
54 loggingutil,
54 loggingutil,
55 registrar,
55 registrar,
56 )
56 )
57 from mercurial.utils import (
57 from mercurial.utils import (
58 dateutil,
58 dateutil,
59 procutil,
59 procutil,
60 )
60 )
61
61
62 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
62 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
63 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
63 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
64 # be specifying the version(s) of Mercurial they are tested with, or
64 # be specifying the version(s) of Mercurial they are tested with, or
65 # leave the attribute unspecified.
65 # leave the attribute unspecified.
66 testedwith = b'ships-with-hg-core'
66 testedwith = b'ships-with-hg-core'
67
67
68 cmdtable = {}
68 cmdtable = {}
69 command = registrar.command(cmdtable)
69 command = registrar.command(cmdtable)
70
70
71 configtable = {}
71 configtable = {}
72 configitem = registrar.configitem(configtable)
72 configitem = registrar.configitem(configtable)
73
73
74 configitem(
74 configitem(
75 b'blackbox',
75 b'blackbox',
76 b'dirty',
76 b'dirty',
77 default=False,
77 default=False,
78 )
78 )
79 configitem(
79 configitem(
80 b'blackbox',
80 b'blackbox',
81 b'maxsize',
81 b'maxsize',
82 default=b'1 MB',
82 default=b'1 MB',
83 )
83 )
84 configitem(
84 configitem(
85 b'blackbox',
85 b'blackbox',
86 b'logsource',
86 b'logsource',
87 default=False,
87 default=False,
88 )
88 )
89 configitem(
89 configitem(
90 b'blackbox',
90 b'blackbox',
91 b'maxfiles',
91 b'maxfiles',
92 default=7,
92 default=7,
93 )
93 )
94 configitem(
94 configitem(
95 b'blackbox',
95 b'blackbox',
96 b'track',
96 b'track',
97 default=lambda: [b'*'],
97 default=lambda: [b'*'],
98 )
98 )
99 configitem(
99 configitem(
100 b'blackbox',
100 b'blackbox',
101 b'ignore',
101 b'ignore',
102 default=lambda: [b'chgserver', b'cmdserver', b'extension'],
102 default=lambda: [b'chgserver', b'cmdserver', b'extension'],
103 )
103 )
104 configitem(
104 configitem(b'blackbox', b'date-format', default=b'')
105 b'blackbox',
106 b'date-format',
107 default=b'%Y/%m/%d %H:%M:%S',
108 )
109
105
110 _lastlogger = loggingutil.proxylogger()
106 _lastlogger = loggingutil.proxylogger()
111
107
112
108
113 class blackboxlogger(object):
109 class blackboxlogger(object):
114 def __init__(self, ui, repo):
110 def __init__(self, ui, repo):
115 self._repo = repo
111 self._repo = repo
116 self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
112 self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
117 self._ignoredevents = set(ui.configlist(b'blackbox', b'ignore'))
113 self._ignoredevents = set(ui.configlist(b'blackbox', b'ignore'))
118 self._maxfiles = ui.configint(b'blackbox', b'maxfiles')
114 self._maxfiles = ui.configint(b'blackbox', b'maxfiles')
119 self._maxsize = ui.configbytes(b'blackbox', b'maxsize')
115 self._maxsize = ui.configbytes(b'blackbox', b'maxsize')
120 self._inlog = False
116 self._inlog = False
121
117
122 def tracked(self, event):
118 def tracked(self, event):
123 return (
119 return (
124 b'*' in self._trackedevents and event not in self._ignoredevents
120 b'*' in self._trackedevents and event not in self._ignoredevents
125 ) or event in self._trackedevents
121 ) or event in self._trackedevents
126
122
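The predicate above lets explicit tracking override the ignore list: a wildcard tracks every event that is not ignored, while naming an event tracks it unconditionally. Restated as a standalone sketch:

def is_tracked(event, track, ignore):
    # Same boolean shape as blackboxlogger.tracked() above.
    return ('*' in track and event not in ignore) or event in track

assert is_tracked('command', {'*'}, {'extension'})
assert not is_tracked('extension', {'*'}, {'extension'})
assert is_tracked('extension', {'*', 'extension'}, {'extension'})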
127 def log(self, ui, event, msg, opts):
123 def log(self, ui, event, msg, opts):
128 # self._log() -> ctx.dirty() may create a new subrepo instance, whose
124 # self._log() -> ctx.dirty() may create a new subrepo instance, whose
129 # ui is derived from baseui. So the recursion guard in ui.log()
125 # ui is derived from baseui. So the recursion guard in ui.log()
130 # doesn't work as it's local to the ui instance.
126 # doesn't work as it's local to the ui instance.
131 if self._inlog:
127 if self._inlog:
132 return
128 return
133 self._inlog = True
129 self._inlog = True
134 try:
130 try:
135 self._log(ui, event, msg, opts)
131 self._log(ui, event, msg, opts)
136 finally:
132 finally:
137 self._inlog = False
133 self._inlog = False
138
134
139 def _log(self, ui, event, msg, opts):
135 def _log(self, ui, event, msg, opts):
140 default = ui.configdate(b'devel', b'default-date')
136 default = ui.configdate(b'devel', b'default-date')
141 date = dateutil.datestr(default, ui.config(b'blackbox', b'date-format'))
137 dateformat = ui.config(b'blackbox', b'date-format')
138 if dateformat:
139 date = dateutil.datestr(default, dateformat)
140 else:
141 # We want to display milliseconds (more precision seems
142 # unnecessary). Since %.3f is not supported, use %f and truncate
143 # microseconds.
144 date = dateutil.datestr(default, b'%Y-%m-%d %H:%M:%S.%f')[:-3]
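# %f always renders six microsecond digits, so slicing off the last
# three characters of the formatted string leaves millisecond precision,
# e.g. b'2013-01-23 19:13:36.123456' -> b'2013-01-23 19:13:36.123'.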
142 user = procutil.getuser()
145 user = procutil.getuser()
143 pid = b'%d' % procutil.getpid()
146 pid = b'%d' % procutil.getpid()
144 changed = b''
147 changed = b''
145 ctx = self._repo[None]
148 ctx = self._repo[None]
146 parents = ctx.parents()
149 parents = ctx.parents()
147 rev = b'+'.join([hex(p.node()) for p in parents])
150 rev = b'+'.join([hex(p.node()) for p in parents])
148 if ui.configbool(b'blackbox', b'dirty') and ctx.dirty(
151 if ui.configbool(b'blackbox', b'dirty') and ctx.dirty(
149 missing=True, merge=False, branch=False
152 missing=True, merge=False, branch=False
150 ):
153 ):
151 changed = b'+'
154 changed = b'+'
152 if ui.configbool(b'blackbox', b'logsource'):
155 if ui.configbool(b'blackbox', b'logsource'):
153 src = b' [%s]' % event
156 src = b' [%s]' % event
154 else:
157 else:
155 src = b''
158 src = b''
156 try:
159 try:
157 fmt = b'%s %s @%s%s (%s)%s> %s'
160 fmt = b'%s %s @%s%s (%s)%s> %s'
158 args = (date, user, rev, changed, pid, src, msg)
161 args = (date, user, rev, changed, pid, src, msg)
159 with loggingutil.openlogfile(
162 with loggingutil.openlogfile(
160 ui,
163 ui,
161 self._repo.vfs,
164 self._repo.vfs,
162 name=b'blackbox.log',
165 name=b'blackbox.log',
163 maxfiles=self._maxfiles,
166 maxfiles=self._maxfiles,
164 maxsize=self._maxsize,
167 maxsize=self._maxsize,
165 ) as fp:
168 ) as fp:
166 fp.write(fmt % args)
169 fp.write(fmt % args)
167 except (IOError, OSError) as err:
170 except (IOError, OSError) as err:
168 # disable tracking to avoid repeated logging failures
171 # disable tracking to avoid repeated logging failures
169 self._trackedevents.clear()
172 self._trackedevents.clear()
170 ui.debug(
173 ui.debug(
171 b'warning: cannot write to blackbox.log: %s\n'
174 b'warning: cannot write to blackbox.log: %s\n'
172 % encoding.strtolocal(err.strerror)
175 % encoding.strtolocal(err.strerror)
173 )
176 )
174 return
177 return
175 _lastlogger.logger = self
178 _lastlogger.logger = self
176
179
177
180
178 def uipopulate(ui):
181 def uipopulate(ui):
179 ui.setlogger(b'blackbox', _lastlogger)
182 ui.setlogger(b'blackbox', _lastlogger)
180
183
181
184
182 def reposetup(ui, repo):
185 def reposetup(ui, repo):
183 # During 'hg pull' a httppeer repo is created to represent the remote repo.
186 # During 'hg pull' a httppeer repo is created to represent the remote repo.
184 # It doesn't have a .hg directory to put a blackbox in, so we don't do
187 # It doesn't have a .hg directory to put a blackbox in, so we don't do
185 # the blackbox setup for it.
188 # the blackbox setup for it.
186 if not repo.local():
189 if not repo.local():
187 return
190 return
188
191
189 # Since blackbox.log is stored in the repo directory, the logger should be
192 # Since blackbox.log is stored in the repo directory, the logger should be
190 # instantiated per repository.
193 # instantiated per repository.
191 logger = blackboxlogger(ui, repo)
194 logger = blackboxlogger(ui, repo)
192 ui.setlogger(b'blackbox', logger)
195 ui.setlogger(b'blackbox', logger)
193
196
194 # Set _lastlogger even if ui.log is not called. This gives blackbox a
197 # Set _lastlogger even if ui.log is not called. This gives blackbox a
195 # fallback place to log.
198 # fallback place to log.
196 if _lastlogger.logger is None:
199 if _lastlogger.logger is None:
197 _lastlogger.logger = logger
200 _lastlogger.logger = logger
198
201
199 repo._wlockfreeprefix.add(b'blackbox.log')
202 repo._wlockfreeprefix.add(b'blackbox.log')
200
203
201
204
202 @command(
205 @command(
203 b'blackbox',
206 b'blackbox',
204 [
207 [
205 (b'l', b'limit', 10, _(b'the number of events to show')),
208 (b'l', b'limit', 10, _(b'the number of events to show')),
206 ],
209 ],
207 _(b'hg blackbox [OPTION]...'),
210 _(b'hg blackbox [OPTION]...'),
208 helpcategory=command.CATEGORY_MAINTENANCE,
211 helpcategory=command.CATEGORY_MAINTENANCE,
209 helpbasic=True,
212 helpbasic=True,
210 )
213 )
211 def blackbox(ui, repo, *revs, **opts):
214 def blackbox(ui, repo, *revs, **opts):
212 """view the recent repository events"""
215 """view the recent repository events"""
213
216
214 if not repo.vfs.exists(b'blackbox.log'):
217 if not repo.vfs.exists(b'blackbox.log'):
215 return
218 return
216
219
217 limit = opts.get('limit')
220 limit = opts.get('limit')
218 fp = repo.vfs(b'blackbox.log', b'r')
221 fp = repo.vfs(b'blackbox.log', b'r')
219 lines = fp.read().split(b'\n')
222 lines = fp.read().split(b'\n')
220
223
221 count = 0
224 count = 0
222 output = []
225 output = []
223 for line in reversed(lines):
226 for line in reversed(lines):
224 if count >= limit:
227 if count >= limit:
225 break
228 break
226
229
227 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
230 # count the commands by matching lines like:
228 if re.match(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
231 # 2013/01/23 19:13:36 root>
232 # 2013/01/23 19:13:36 root (1234)>
233 # 2013/01/23 19:13:36 root @0000000000000000000000000000000000000000 (1234)>
234 # 2013-01-23 19:13:36.000 root @0000000000000000000000000000000000000000 (1234)>
235 if re.match(
236 br'^\d{4}[-/]\d{2}[-/]\d{2} \d{2}:\d{2}:\d{2}(.\d*)? .*> .*', line
237 ):
229 count += 1
238 count += 1
230 output.append(line)
239 output.append(line)
231
240
232 ui.status(b'\n'.join(reversed(output)))
241 ui.status(b'\n'.join(reversed(output)))
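The broadened pattern accepts both the legacy slash-separated dates and the newer dash-separated millisecond format. Checking the verbatim regex from the code against sample lines like those in the comment above:

import re

pat = br'^\d{4}[-/]\d{2}[-/]\d{2} \d{2}:\d{2}:\d{2}(.\d*)? .*> .*'
samples = [
    b'2013/01/23 19:13:36 root> serve --daemon',
    b'2013-01-23 19:13:36.000 root @0000000000000000000000000000000000000000 (1234)> commit',
]
for line in samples:
    assert re.match(pat, line)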
@@ -1,89 +1,89 b''
1 # commitextras.py
1 # commitextras.py
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''adds a new --extra flag to commit (ADVANCED)'''
8 '''adds a new --extra flag to commit (ADVANCED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import re
12 import re
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 commands,
16 commands,
17 error,
17 error,
18 extensions,
18 extensions,
19 registrar,
19 registrar,
20 util,
20 util,
21 )
21 )
22
22
23 cmdtable = {}
23 cmdtable = {}
24 command = registrar.command(cmdtable)
24 command = registrar.command(cmdtable)
25 testedwith = b'ships-with-hg-core'
25 testedwith = b'ships-with-hg-core'
26
26
27 usedinternally = {
27 usedinternally = {
28 b'amend_source',
28 b'amend_source',
29 b'branch',
29 b'branch',
30 b'close',
30 b'close',
31 b'histedit_source',
31 b'histedit_source',
32 b'topic',
32 b'topic',
33 b'rebase_source',
33 b'rebase_source',
34 b'intermediate-source',
34 b'intermediate-source',
35 b'__touch-noise__',
35 b'__touch-noise__',
36 b'source',
36 b'source',
37 b'transplant_source',
37 b'transplant_source',
38 }
38 }
39
39
40
40
41 def extsetup(ui):
41 def extsetup(ui):
42 entry = extensions.wrapcommand(commands.table, b'commit', _commit)
42 entry = extensions.wrapcommand(commands.table, b'commit', _commit)
43 options = entry[1]
43 options = entry[1]
44 options.append(
44 options.append(
45 (
45 (
46 b'',
46 b'',
47 b'extra',
47 b'extra',
48 [],
48 [],
49 _(b'set a changeset\'s extra values'),
49 _(b'set a changeset\'s extra values'),
50 _(b"KEY=VALUE"),
50 _(b"KEY=VALUE"),
51 )
51 )
52 )
52 )
53
53
54
54
55 def _commit(orig, ui, repo, *pats, **opts):
55 def _commit(orig, ui, repo, *pats, **opts):
56 if util.safehasattr(repo, 'unfiltered'):
56 if util.safehasattr(repo, 'unfiltered'):
57 repo = repo.unfiltered()
57 repo = repo.unfiltered()
58
58
59 class repoextra(repo.__class__):
59 class repoextra(repo.__class__):
60 def commit(self, *innerpats, **inneropts):
60 def commit(self, *innerpats, **inneropts):
61 extras = opts.get('extra')
61 extras = opts.get('extra')
62 for raw in extras:
62 for raw in extras:
63 if b'=' not in raw:
63 if b'=' not in raw:
64 msg = _(
64 msg = _(
65 b"unable to parse '%s', should follow "
65 b"unable to parse '%s', should follow "
66 b"KEY=VALUE format"
66 b"KEY=VALUE format"
67 )
67 )
68 raise error.Abort(msg % raw)
68 raise error.InputError(msg % raw)
69 k, v = raw.split(b'=', 1)
69 k, v = raw.split(b'=', 1)
70 if not k:
70 if not k:
71 msg = _(b"unable to parse '%s', keys can't be empty")
71 msg = _(b"unable to parse '%s', keys can't be empty")
72 raise error.Abort(msg % raw)
72 raise error.InputError(msg % raw)
73 if re.search(br'[^\w-]', k):
73 if re.search(br'[^\w-]', k):
74 msg = _(
74 msg = _(
75 b"keys can only contain ascii letters, digits,"
75 b"keys can only contain ascii letters, digits,"
76 b" '_' and '-'"
76 b" '_' and '-'"
77 )
77 )
78 raise error.Abort(msg)
78 raise error.InputError(msg)
79 if k in usedinternally:
79 if k in usedinternally:
80 msg = _(
80 msg = _(
81 b"key '%s' is used internally, can't be set "
81 b"key '%s' is used internally, can't be set "
82 b"manually"
82 b"manually"
83 )
83 )
84 raise error.Abort(msg % k)
84 raise error.InputError(msg % k)
85 inneropts['extra'][k] = v
85 inneropts['extra'][k] = v
86 return super(repoextra, self).commit(*innerpats, **inneropts)
86 return super(repoextra, self).commit(*innerpats, **inneropts)
87
87
88 repo.__class__ = repoextra
88 repo.__class__ = repoextra
89 return orig(ui, repo, *pats, **opts)
89 return orig(ui, repo, *pats, **opts)
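With the extension loaded, the flag is used as :hg:`commit --extra KEY=VALUE`, and the checks above reject malformed keys before any commit is created. The same validation restated as a standalone sketch (not the extension's API):

import re

def validate_extra(raw, usedinternally):
    # Mirrors the checks performed in _commit above.
    if '=' not in raw:
        raise ValueError("unable to parse %r, should follow KEY=VALUE format" % raw)
    k, v = raw.split('=', 1)
    if not k:
        raise ValueError("unable to parse %r, keys can't be empty" % raw)
    if re.search(r'[^\w-]', k):
        raise ValueError("keys can only contain ascii letters, digits, '_' and '-'")
    if k in usedinternally:
        raise ValueError("key %r is used internally, can't be set manually" % k)
    return k, v

print(validate_extra('ticket-id=1234', {'branch', 'source'}))  # ('ticket-id', '1234')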
@@ -1,732 +1,733 b''
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will cause the source
15 # * Using "--config convert.hg.saverev=true" will cause the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 import os
21 import os
22 import re
22 import re
23 import time
23 import time
24
24
25 from mercurial.i18n import _
25 from mercurial.i18n import _
26 from mercurial.pycompat import open
26 from mercurial.pycompat import open
27 from mercurial.node import (
27 from mercurial.node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 sha1nodeconstants,
30 sha1nodeconstants,
31 )
31 )
32 from mercurial import (
32 from mercurial import (
33 bookmarks,
33 bookmarks,
34 context,
34 context,
35 error,
35 error,
36 exchange,
36 exchange,
37 hg,
37 hg,
38 lock as lockmod,
38 lock as lockmod,
39 logcmdutil,
39 logcmdutil,
40 merge as mergemod,
40 merge as mergemod,
41 mergestate,
41 phases,
42 phases,
42 pycompat,
43 pycompat,
43 util,
44 util,
44 )
45 )
45 from mercurial.utils import dateutil
46 from mercurial.utils import dateutil
46
47
47 stringio = util.stringio
48 stringio = util.stringio
48
49
49 from . import common
50 from . import common
50
51
51 mapfile = common.mapfile
52 mapfile = common.mapfile
52 NoRepo = common.NoRepo
53 NoRepo = common.NoRepo
53
54
54 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
55 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
55
56
56
57
57 class mercurial_sink(common.converter_sink):
58 class mercurial_sink(common.converter_sink):
58 def __init__(self, ui, repotype, path):
59 def __init__(self, ui, repotype, path):
59 common.converter_sink.__init__(self, ui, repotype, path)
60 common.converter_sink.__init__(self, ui, repotype, path)
60 self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
61 self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
61 self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
62 self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
62 self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
63 self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
63 self.lastbranch = None
64 self.lastbranch = None
64 if os.path.isdir(path) and len(os.listdir(path)) > 0:
65 if os.path.isdir(path) and len(os.listdir(path)) > 0:
65 try:
66 try:
66 self.repo = hg.repository(self.ui, path)
67 self.repo = hg.repository(self.ui, path)
67 if not self.repo.local():
68 if not self.repo.local():
68 raise NoRepo(
69 raise NoRepo(
69 _(b'%s is not a local Mercurial repository') % path
70 _(b'%s is not a local Mercurial repository') % path
70 )
71 )
71 except error.RepoError as err:
72 except error.RepoError as err:
72 ui.traceback()
73 ui.traceback()
73 raise NoRepo(err.args[0])
74 raise NoRepo(err.args[0])
74 else:
75 else:
75 try:
76 try:
76 ui.status(_(b'initializing destination %s repository\n') % path)
77 ui.status(_(b'initializing destination %s repository\n') % path)
77 self.repo = hg.repository(self.ui, path, create=True)
78 self.repo = hg.repository(self.ui, path, create=True)
78 if not self.repo.local():
79 if not self.repo.local():
79 raise NoRepo(
80 raise NoRepo(
80 _(b'%s is not a local Mercurial repository') % path
81 _(b'%s is not a local Mercurial repository') % path
81 )
82 )
82 self.created.append(path)
83 self.created.append(path)
83 except error.RepoError:
84 except error.RepoError:
84 ui.traceback()
85 ui.traceback()
85 raise NoRepo(
86 raise NoRepo(
86 _(b"could not create hg repository %s as sink") % path
87 _(b"could not create hg repository %s as sink") % path
87 )
88 )
88 self.lock = None
89 self.lock = None
89 self.wlock = None
90 self.wlock = None
90 self.filemapmode = False
91 self.filemapmode = False
91 self.subrevmaps = {}
92 self.subrevmaps = {}
92
93
93 def before(self):
94 def before(self):
94 self.ui.debug(b'run hg sink pre-conversion action\n')
95 self.ui.debug(b'run hg sink pre-conversion action\n')
95 self.wlock = self.repo.wlock()
96 self.wlock = self.repo.wlock()
96 self.lock = self.repo.lock()
97 self.lock = self.repo.lock()
97
98
98 def after(self):
99 def after(self):
99 self.ui.debug(b'run hg sink post-conversion action\n')
100 self.ui.debug(b'run hg sink post-conversion action\n')
100 if self.lock:
101 if self.lock:
101 self.lock.release()
102 self.lock.release()
102 if self.wlock:
103 if self.wlock:
103 self.wlock.release()
104 self.wlock.release()
104
105
105 def revmapfile(self):
106 def revmapfile(self):
106 return self.repo.vfs.join(b"shamap")
107 return self.repo.vfs.join(b"shamap")
107
108
108 def authorfile(self):
109 def authorfile(self):
109 return self.repo.vfs.join(b"authormap")
110 return self.repo.vfs.join(b"authormap")
110
111
111 def setbranch(self, branch, pbranches):
112 def setbranch(self, branch, pbranches):
112 if not self.clonebranches:
113 if not self.clonebranches:
113 return
114 return
114
115
115 setbranch = branch != self.lastbranch
116 setbranch = branch != self.lastbranch
116 self.lastbranch = branch
117 self.lastbranch = branch
117 if not branch:
118 if not branch:
118 branch = b'default'
119 branch = b'default'
119 pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
120 pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
120
121
121 branchpath = os.path.join(self.path, branch)
122 branchpath = os.path.join(self.path, branch)
122 if setbranch:
123 if setbranch:
123 self.after()
124 self.after()
124 try:
125 try:
125 self.repo = hg.repository(self.ui, branchpath)
126 self.repo = hg.repository(self.ui, branchpath)
126 except Exception:
127 except Exception:
127 self.repo = hg.repository(self.ui, branchpath, create=True)
128 self.repo = hg.repository(self.ui, branchpath, create=True)
128 self.before()
129 self.before()
129
130
130 # pbranches may bring revisions from other branches (merge parents)
131 # pbranches may bring revisions from other branches (merge parents)
131 # Make sure we have them, or pull them.
132 # Make sure we have them, or pull them.
132 missings = {}
133 missings = {}
133 for b in pbranches:
134 for b in pbranches:
134 try:
135 try:
135 self.repo.lookup(b[0])
136 self.repo.lookup(b[0])
136 except Exception:
137 except Exception:
137 missings.setdefault(b[1], []).append(b[0])
138 missings.setdefault(b[1], []).append(b[0])
138
139
139 if missings:
140 if missings:
140 self.after()
141 self.after()
141 for pbranch, heads in sorted(pycompat.iteritems(missings)):
142 for pbranch, heads in sorted(pycompat.iteritems(missings)):
142 pbranchpath = os.path.join(self.path, pbranch)
143 pbranchpath = os.path.join(self.path, pbranch)
143 prepo = hg.peer(self.ui, {}, pbranchpath)
144 prepo = hg.peer(self.ui, {}, pbranchpath)
144 self.ui.note(
145 self.ui.note(
145 _(b'pulling from %s into %s\n') % (pbranch, branch)
146 _(b'pulling from %s into %s\n') % (pbranch, branch)
146 )
147 )
147 exchange.pull(
148 exchange.pull(
148 self.repo, prepo, heads=[prepo.lookup(h) for h in heads]
149 self.repo, prepo, heads=[prepo.lookup(h) for h in heads]
149 )
150 )
150 self.before()
151 self.before()
151
152
152 def _rewritetags(self, source, revmap, data):
153 def _rewritetags(self, source, revmap, data):
153 fp = stringio()
154 fp = stringio()
154 for line in data.splitlines():
155 for line in data.splitlines():
155 s = line.split(b' ', 1)
156 s = line.split(b' ', 1)
156 if len(s) != 2:
157 if len(s) != 2:
157 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
158 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
158 fp.write(b'%s\n' % line) # Bogus, but keep for hash stability
159 fp.write(b'%s\n' % line) # Bogus, but keep for hash stability
159 continue
160 continue
160 revid = revmap.get(source.lookuprev(s[0]))
161 revid = revmap.get(source.lookuprev(s[0]))
161 if not revid:
162 if not revid:
162 if s[0] == sha1nodeconstants.nullhex:
163 if s[0] == sha1nodeconstants.nullhex:
163 revid = s[0]
164 revid = s[0]
164 else:
165 else:
165 # missing, but keep for hash stability
166 # missing, but keep for hash stability
166 self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
167 self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
167 fp.write(b'%s\n' % line)
168 fp.write(b'%s\n' % line)
168 continue
169 continue
169 fp.write(b'%s %s\n' % (revid, s[1]))
170 fp.write(b'%s %s\n' % (revid, s[1]))
170 return fp.getvalue()
171 return fp.getvalue()
171
172
172 def _rewritesubstate(self, source, data):
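_rewritetags above maps each '<node> <tag>' line of the source's .hgtags through the conversion revmap, deliberately writing unparsable or unmapped lines back verbatim so converted hashes stay stable. The loop in miniature (a sketch with illustrative names):

def rewrite_tags(data, revmap):
    out = []
    for line in data.splitlines():
        parts = line.split(b' ', 1)
        if len(parts) == 2 and parts[0] in revmap:
            out.append(b'%s %s' % (revmap[parts[0]], parts[1]))
        else:
            out.append(line)  # bogus or missing: keep for hash stability
    return b''.join(l + b'\n' for l in out)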
173 def _rewritesubstate(self, source, data):
173 fp = stringio()
174 fp = stringio()
174 for line in data.splitlines():
175 for line in data.splitlines():
175 s = line.split(b' ', 1)
176 s = line.split(b' ', 1)
176 if len(s) != 2:
177 if len(s) != 2:
177 continue
178 continue
178
179
179 revid = s[0]
180 revid = s[0]
180 subpath = s[1]
181 subpath = s[1]
181 if revid != sha1nodeconstants.nullhex:
182 if revid != sha1nodeconstants.nullhex:
182 revmap = self.subrevmaps.get(subpath)
183 revmap = self.subrevmaps.get(subpath)
183 if revmap is None:
184 if revmap is None:
184 revmap = mapfile(
185 revmap = mapfile(
185 self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
186 self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
186 )
187 )
187 self.subrevmaps[subpath] = revmap
188 self.subrevmaps[subpath] = revmap
188
189
189 # It is reasonable that one or more of the subrepos don't
190 # It is reasonable that one or more of the subrepos don't
190 # need to be converted, in which case they can be cloned
191 # need to be converted, in which case they can be cloned
191 # into place instead of converted. Therefore, only warn
192 # into place instead of converted. Therefore, only warn
192 # once.
193 # once.
193 msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
194 msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
194 if len(revmap) == 0:
195 if len(revmap) == 0:
195 sub = self.repo.wvfs.reljoin(subpath, b'.hg')
196 sub = self.repo.wvfs.reljoin(subpath, b'.hg')
196
197
197 if self.repo.wvfs.exists(sub):
198 if self.repo.wvfs.exists(sub):
198 self.ui.warn(msg % subpath)
199 self.ui.warn(msg % subpath)
199
200
200 newid = revmap.get(revid)
201 newid = revmap.get(revid)
201 if not newid:
202 if not newid:
202 if len(revmap) > 0:
203 if len(revmap) > 0:
203 self.ui.warn(
204 self.ui.warn(
204 _(b"%s is missing from %s/.hg/shamap\n")
205 _(b"%s is missing from %s/.hg/shamap\n")
205 % (revid, subpath)
206 % (revid, subpath)
206 )
207 )
207 else:
208 else:
208 revid = newid
209 revid = newid
209
210
210 fp.write(b'%s %s\n' % (revid, subpath))
211 fp.write(b'%s %s\n' % (revid, subpath))
211
212
212 return fp.getvalue()
213 return fp.getvalue()
213
214
214 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
215 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
215 """Calculates the files from p2 that we need to pull in when merging p1
216 """Calculates the files from p2 that we need to pull in when merging p1
216 and p2, given that the merge is coming from the given source.
217 and p2, given that the merge is coming from the given source.
217
218
218 This prevents us from losing files that only exist in the target p2 and
219 This prevents us from losing files that only exist in the target p2 and
219 that don't come from the source repo (like if you're merging multiple
220 that don't come from the source repo (like if you're merging multiple
220 repositories together).
221 repositories together).
221 """
222 """
222 anc = [p1ctx.ancestor(p2ctx)]
223 anc = [p1ctx.ancestor(p2ctx)]
223 # Calculate what files are coming from p2
224 # Calculate what files are coming from p2
224 # TODO: mresult.commitinfo might be able to get that info
225 # TODO: mresult.commitinfo might be able to get that info
225 mresult = mergemod.calculateupdates(
226 mresult = mergemod.calculateupdates(
226 self.repo,
227 self.repo,
227 p1ctx,
228 p1ctx,
228 p2ctx,
229 p2ctx,
229 anc,
230 anc,
230 branchmerge=True,
231 branchmerge=True,
231 force=True,
232 force=True,
232 acceptremote=False,
233 acceptremote=False,
233 followcopies=False,
234 followcopies=False,
234 )
235 )
235
236
236 for file, (action, info, msg) in mresult.filemap():
237 for file, (action, info, msg) in mresult.filemap():
237 if source.targetfilebelongstosource(file):
238 if source.targetfilebelongstosource(file):
238 # If the file belongs to the source repo, ignore the p2
239 # If the file belongs to the source repo, ignore the p2
239 # since it will be covered by the existing fileset.
240 # since it will be covered by the existing fileset.
240 continue
241 continue
241
242
242 # If the file requires actual merging, abort. We don't have enough
243 # If the file requires actual merging, abort. We don't have enough
243 # context to resolve merges correctly.
244 # context to resolve merges correctly.
244 if action in [b'm', b'dm', b'cd', b'dc']:
245 if action in mergestate.CONVERT_MERGE_ACTIONS:
245 raise error.Abort(
246 raise error.Abort(
246 _(
247 _(
247 b"unable to convert merge commit "
248 b"unable to convert merge commit "
248 b"since target parents do not merge cleanly (file "
249 b"since target parents do not merge cleanly (file "
249 b"%s, parents %s and %s)"
250 b"%s, parents %s and %s)"
250 )
251 )
251 % (file, p1ctx, p2ctx)
252 % (file, p1ctx, p2ctx)
252 )
253 )
253 elif action == b'k':
254 elif action == mergestate.ACTION_KEEP:
254 # 'keep' means nothing changed from p1
255 # 'keep' means nothing changed from p1
255 continue
256 continue
256 else:
257 else:
257 # Any other change means we want to take the p2 version
258 # Any other change means we want to take the p2 version
258 yield file
259 yield file
259
260
260 def putcommit(
261 def putcommit(
261 self, files, copies, parents, commit, source, revmap, full, cleanp2
262 self, files, copies, parents, commit, source, revmap, full, cleanp2
262 ):
263 ):
263 files = dict(files)
264 files = dict(files)
264
265
265 def getfilectx(repo, memctx, f):
266 def getfilectx(repo, memctx, f):
266 if p2ctx and f in p2files and f not in copies:
267 if p2ctx and f in p2files and f not in copies:
267 self.ui.debug(b'reusing %s from p2\n' % f)
268 self.ui.debug(b'reusing %s from p2\n' % f)
268 try:
269 try:
269 return p2ctx[f]
270 return p2ctx[f]
270 except error.ManifestLookupError:
271 except error.ManifestLookupError:
271 # If the file doesn't exist in p2, then we're syncing a
272 # If the file doesn't exist in p2, then we're syncing a
272 # delete, so just return None.
273 # delete, so just return None.
273 return None
274 return None
274 try:
275 try:
275 v = files[f]
276 v = files[f]
276 except KeyError:
277 except KeyError:
277 return None
278 return None
278 data, mode = source.getfile(f, v)
279 data, mode = source.getfile(f, v)
279 if data is None:
280 if data is None:
280 return None
281 return None
281 if f == b'.hgtags':
282 if f == b'.hgtags':
282 data = self._rewritetags(source, revmap, data)
283 data = self._rewritetags(source, revmap, data)
283 if f == b'.hgsubstate':
284 if f == b'.hgsubstate':
284 data = self._rewritesubstate(source, data)
285 data = self._rewritesubstate(source, data)
285 return context.memfilectx(
286 return context.memfilectx(
286 self.repo,
287 self.repo,
287 memctx,
288 memctx,
288 f,
289 f,
289 data,
290 data,
290 b'l' in mode,
291 b'l' in mode,
291 b'x' in mode,
292 b'x' in mode,
292 copies.get(f),
293 copies.get(f),
293 )
294 )
294
295
295 pl = []
296 pl = []
296 for p in parents:
297 for p in parents:
297 if p not in pl:
298 if p not in pl:
298 pl.append(p)
299 pl.append(p)
299 parents = pl
300 parents = pl
300 nparents = len(parents)
301 nparents = len(parents)
301 if self.filemapmode and nparents == 1:
302 if self.filemapmode and nparents == 1:
302 m1node = self.repo.changelog.read(bin(parents[0]))[0]
303 m1node = self.repo.changelog.read(bin(parents[0]))[0]
303 parent = parents[0]
304 parent = parents[0]
304
305
305 if len(parents) < 2:
306 if len(parents) < 2:
306 parents.append(self.repo.nullid)
307 parents.append(self.repo.nullid)
307 if len(parents) < 2:
308 if len(parents) < 2:
308 parents.append(self.repo.nullid)
309 parents.append(self.repo.nullid)
309 p2 = parents.pop(0)
310 p2 = parents.pop(0)
310
311
311 text = commit.desc
312 text = commit.desc
312
313
313 sha1s = re.findall(sha1re, text)
314 sha1s = re.findall(sha1re, text)
314 for sha1 in sha1s:
315 for sha1 in sha1s:
315 oldrev = source.lookuprev(sha1)
316 oldrev = source.lookuprev(sha1)
316 newrev = revmap.get(oldrev)
317 newrev = revmap.get(oldrev)
317 if newrev is not None:
318 if newrev is not None:
318 text = text.replace(sha1, newrev[: len(sha1)])
319 text = text.replace(sha1, newrev[: len(sha1)])
319
320
320 extra = commit.extra.copy()
321 extra = commit.extra.copy()
321
322
322 sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
323 sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
323 if sourcename:
324 if sourcename:
324 extra[b'convert_source'] = sourcename
325 extra[b'convert_source'] = sourcename
325
326
326 for label in (
327 for label in (
327 b'source',
328 b'source',
328 b'transplant_source',
329 b'transplant_source',
329 b'rebase_source',
330 b'rebase_source',
330 b'intermediate-source',
331 b'intermediate-source',
331 ):
332 ):
332 node = extra.get(label)
333 node = extra.get(label)
333
334
334 if node is None:
335 if node is None:
335 continue
336 continue
336
337
337 # Only transplant stores its reference in binary
338 # Only transplant stores its reference in binary
338 if label == b'transplant_source':
339 if label == b'transplant_source':
339 node = hex(node)
340 node = hex(node)
340
341
341 newrev = revmap.get(node)
342 newrev = revmap.get(node)
342 if newrev is not None:
343 if newrev is not None:
343 if label == b'transplant_source':
344 if label == b'transplant_source':
344 newrev = bin(newrev)
345 newrev = bin(newrev)
345
346
346 extra[label] = newrev
347 extra[label] = newrev
347
348
348 if self.branchnames and commit.branch:
349 if self.branchnames and commit.branch:
349 extra[b'branch'] = commit.branch
350 extra[b'branch'] = commit.branch
350 if commit.rev and commit.saverev:
351 if commit.rev and commit.saverev:
351 extra[b'convert_revision'] = commit.rev
352 extra[b'convert_revision'] = commit.rev
352
353
353 while parents:
354 while parents:
354 p1 = p2
355 p1 = p2
355 p2 = parents.pop(0)
356 p2 = parents.pop(0)
356 p1ctx = self.repo[p1]
357 p1ctx = self.repo[p1]
357 p2ctx = None
358 p2ctx = None
358 if p2 != self.repo.nullid:
359 if p2 != self.repo.nullid:
359 p2ctx = self.repo[p2]
360 p2ctx = self.repo[p2]
360 fileset = set(files)
361 fileset = set(files)
361 if full:
362 if full:
362 fileset.update(self.repo[p1])
363 fileset.update(self.repo[p1])
363 fileset.update(self.repo[p2])
364 fileset.update(self.repo[p2])
364
365
365 if p2ctx:
366 if p2ctx:
366 p2files = set(cleanp2)
367 p2files = set(cleanp2)
367 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
368 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
368 p2files.add(file)
369 p2files.add(file)
369 fileset.add(file)
370 fileset.add(file)
370
371
371 ctx = context.memctx(
372 ctx = context.memctx(
372 self.repo,
373 self.repo,
373 (p1, p2),
374 (p1, p2),
374 text,
375 text,
375 fileset,
376 fileset,
376 getfilectx,
377 getfilectx,
377 commit.author,
378 commit.author,
378 commit.date,
379 commit.date,
379 extra,
380 extra,
380 )
381 )
381
382
382 # We won't know if the conversion changes the node until after the
383 # We won't know if the conversion changes the node until after the
383 # commit, so copy the source's phase for now.
384 # commit, so copy the source's phase for now.
384 self.repo.ui.setconfig(
385 self.repo.ui.setconfig(
385 b'phases',
386 b'phases',
386 b'new-commit',
387 b'new-commit',
387 phases.phasenames[commit.phase],
388 phases.phasenames[commit.phase],
388 b'convert',
389 b'convert',
389 )
390 )
390
391
391 with self.repo.transaction(b"convert") as tr:
392 with self.repo.transaction(b"convert") as tr:
392 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
393 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
393 origctx = commit.ctx
394 origctx = commit.ctx
394 else:
395 else:
395 origctx = None
396 origctx = None
396 node = hex(self.repo.commitctx(ctx, origctx=origctx))
397 node = hex(self.repo.commitctx(ctx, origctx=origctx))
397
398
398 # If the node value has changed, but the phase is lower than
399 # If the node value has changed, but the phase is lower than
399 # draft, set it back to draft since it hasn't been exposed
400 # draft, set it back to draft since it hasn't been exposed
400 # anywhere.
401 # anywhere.
401 if commit.rev != node:
402 if commit.rev != node:
402 ctx = self.repo[node]
403 ctx = self.repo[node]
403 if ctx.phase() < phases.draft:
404 if ctx.phase() < phases.draft:
404 phases.registernew(
405 phases.registernew(
405 self.repo, tr, phases.draft, [ctx.rev()]
406 self.repo, tr, phases.draft, [ctx.rev()]
406 )
407 )
407
408
408 text = b"(octopus merge fixup)\n"
409 text = b"(octopus merge fixup)\n"
409 p2 = node
410 p2 = node
410
411
411 if self.filemapmode and nparents == 1:
412 if self.filemapmode and nparents == 1:
412 man = self.repo.manifestlog.getstorage(b'')
413 man = self.repo.manifestlog.getstorage(b'')
413 mnode = self.repo.changelog.read(bin(p2))[0]
414 mnode = self.repo.changelog.read(bin(p2))[0]
414 closed = b'close' in commit.extra
415 closed = b'close' in commit.extra
415 if not closed and not man.cmp(m1node, man.revision(mnode)):
416 if not closed and not man.cmp(m1node, man.revision(mnode)):
416 self.ui.status(_(b"filtering out empty revision\n"))
417 self.ui.status(_(b"filtering out empty revision\n"))
417 self.repo.rollback(force=True)
418 self.repo.rollback(force=True)
418 return parent
419 return parent
419 return p2
420 return p2
420
421
421 def puttags(self, tags):
422 def puttags(self, tags):
422 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
423 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
423 tagparent = tagparent or self.repo.nullid
424 tagparent = tagparent or self.repo.nullid
424
425
425 oldlines = set()
426 oldlines = set()
426 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
427 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
427 for h in heads:
428 for h in heads:
428 if b'.hgtags' in self.repo[h]:
429 if b'.hgtags' in self.repo[h]:
429 oldlines.update(
430 oldlines.update(
430 set(self.repo[h][b'.hgtags'].data().splitlines(True))
431 set(self.repo[h][b'.hgtags'].data().splitlines(True))
431 )
432 )
432 oldlines = sorted(list(oldlines))
433 oldlines = sorted(list(oldlines))
433
434
434 newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
435 newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
435 if newlines == oldlines:
436 if newlines == oldlines:
436 return None, None
437 return None, None
437
438
438 # if the old and new tags match, then there is nothing to update
439 # if the old and new tags match, then there is nothing to update
439 oldtags = set()
440 oldtags = set()
440 newtags = set()
441 newtags = set()
441 for line in oldlines:
442 for line in oldlines:
442 s = line.strip().split(b' ', 1)
443 s = line.strip().split(b' ', 1)
443 if len(s) != 2:
444 if len(s) != 2:
444 continue
445 continue
445 oldtags.add(s[1])
446 oldtags.add(s[1])
446 for line in newlines:
447 for line in newlines:
447 s = line.strip().split(b' ', 1)
448 s = line.strip().split(b' ', 1)
448 if len(s) != 2:
449 if len(s) != 2:
449 continue
450 continue
450 if s[1] not in oldtags:
451 if s[1] not in oldtags:
451 newtags.add(s[1].strip())
452 newtags.add(s[1].strip())
452
453
453 if not newtags:
454 if not newtags:
454 return None, None
455 return None, None
455
456
456 data = b"".join(newlines)
457 data = b"".join(newlines)
457
458
458 def getfilectx(repo, memctx, f):
459 def getfilectx(repo, memctx, f):
459 return context.memfilectx(repo, memctx, f, data, False, False, None)
460 return context.memfilectx(repo, memctx, f, data, False, False, None)
460
461
461 self.ui.status(_(b"updating tags\n"))
462 self.ui.status(_(b"updating tags\n"))
462 date = b"%d 0" % int(time.mktime(time.gmtime()))
463 date = b"%d 0" % int(time.mktime(time.gmtime()))
463 extra = {b'branch': self.tagsbranch}
464 extra = {b'branch': self.tagsbranch}
464 ctx = context.memctx(
465 ctx = context.memctx(
465 self.repo,
466 self.repo,
466 (tagparent, None),
467 (tagparent, None),
467 b"update tags",
468 b"update tags",
468 [b".hgtags"],
469 [b".hgtags"],
469 getfilectx,
470 getfilectx,
470 b"convert-repo",
471 b"convert-repo",
471 date,
472 date,
472 extra,
473 extra,
473 )
474 )
474 node = self.repo.commitctx(ctx)
475 node = self.repo.commitctx(ctx)
475 return hex(node), hex(tagparent)
476 return hex(node), hex(tagparent)
476
477
477 def setfilemapmode(self, active):
478 def setfilemapmode(self, active):
478 self.filemapmode = active
479 self.filemapmode = active
479
480
480 def putbookmarks(self, updatedbookmark):
481 def putbookmarks(self, updatedbookmark):
481 if not len(updatedbookmark):
482 if not len(updatedbookmark):
482 return
483 return
483 wlock = lock = tr = None
484 wlock = lock = tr = None
484 try:
485 try:
485 wlock = self.repo.wlock()
486 wlock = self.repo.wlock()
486 lock = self.repo.lock()
487 lock = self.repo.lock()
487 tr = self.repo.transaction(b'bookmark')
488 tr = self.repo.transaction(b'bookmark')
488 self.ui.status(_(b"updating bookmarks\n"))
489 self.ui.status(_(b"updating bookmarks\n"))
489 destmarks = self.repo._bookmarks
490 destmarks = self.repo._bookmarks
490 changes = [
491 changes = [
491 (bookmark, bin(updatedbookmark[bookmark]))
492 (bookmark, bin(updatedbookmark[bookmark]))
492 for bookmark in updatedbookmark
493 for bookmark in updatedbookmark
493 ]
494 ]
494 destmarks.applychanges(self.repo, tr, changes)
495 destmarks.applychanges(self.repo, tr, changes)
495 tr.close()
496 tr.close()
496 finally:
497 finally:
497 lockmod.release(lock, wlock, tr)
498 lockmod.release(lock, wlock, tr)
498
499
499 def hascommitfrommap(self, rev):
500 def hascommitfrommap(self, rev):
500 # the exact semantics of clonebranches are unclear, so we can't say no
501 # the exact semantics of clonebranches are unclear, so we can't say no
501 return rev in self.repo or self.clonebranches
502 return rev in self.repo or self.clonebranches
502
503
503 def hascommitforsplicemap(self, rev):
504 def hascommitforsplicemap(self, rev):
504 if rev not in self.repo and self.clonebranches:
505 if rev not in self.repo and self.clonebranches:
505 raise error.Abort(
506 raise error.Abort(
506 _(
507 _(
507 b'revision %s not found in destination '
508 b'revision %s not found in destination '
508 b'repository (lookups with clonebranches=true '
509 b'repository (lookups with clonebranches=true '
509 b'are not implemented)'
510 b'are not implemented)'
510 )
511 )
511 % rev
512 % rev
512 )
513 )
513 return rev in self.repo
514 return rev in self.repo
514
515
515
516
516 class mercurial_source(common.converter_source):
517 class mercurial_source(common.converter_source):
517 def __init__(self, ui, repotype, path, revs=None):
518 def __init__(self, ui, repotype, path, revs=None):
518 common.converter_source.__init__(self, ui, repotype, path, revs)
519 common.converter_source.__init__(self, ui, repotype, path, revs)
519 self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
520 self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
520 self.ignored = set()
521 self.ignored = set()
521 self.saverev = ui.configbool(b'convert', b'hg.saverev')
522 self.saverev = ui.configbool(b'convert', b'hg.saverev')
522 try:
523 try:
523 self.repo = hg.repository(self.ui, path)
524 self.repo = hg.repository(self.ui, path)
524 # try to provoke an exception if this isn't really a hg
525 # try to provoke an exception if this isn't really a hg
525 # repo, but some other bogus compatible-looking url
526 # repo, but some other bogus compatible-looking url
526 if not self.repo.local():
527 if not self.repo.local():
527 raise error.RepoError
528 raise error.RepoError
528 except error.RepoError:
529 except error.RepoError:
529 ui.traceback()
530 ui.traceback()
530 raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
531 raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
531 self.lastrev = None
532 self.lastrev = None
532 self.lastctx = None
533 self.lastctx = None
533 self._changescache = None, None
534 self._changescache = None, None
534 self.convertfp = None
535 self.convertfp = None
535 # Restrict converted revisions to startrev descendants
536 # Restrict converted revisions to startrev descendants
536 startnode = ui.config(b'convert', b'hg.startrev')
537 startnode = ui.config(b'convert', b'hg.startrev')
537 hgrevs = ui.config(b'convert', b'hg.revs')
538 hgrevs = ui.config(b'convert', b'hg.revs')
538 if hgrevs is None:
539 if hgrevs is None:
539 if startnode is not None:
540 if startnode is not None:
540 try:
541 try:
541 startnode = self.repo.lookup(startnode)
542 startnode = self.repo.lookup(startnode)
542 except error.RepoError:
543 except error.RepoError:
543 raise error.Abort(
544 raise error.Abort(
544 _(b'%s is not a valid start revision') % startnode
545 _(b'%s is not a valid start revision') % startnode
545 )
546 )
546 startrev = self.repo.changelog.rev(startnode)
547 startrev = self.repo.changelog.rev(startnode)
547 children = {startnode: 1}
548 children = {startnode: 1}
548 for r in self.repo.changelog.descendants([startrev]):
549 for r in self.repo.changelog.descendants([startrev]):
549 children[self.repo.changelog.node(r)] = 1
550 children[self.repo.changelog.node(r)] = 1
550 self.keep = children.__contains__
551 self.keep = children.__contains__
551 else:
552 else:
552 self.keep = util.always
553 self.keep = util.always
553 if revs:
554 if revs:
554 self._heads = [self.repo.lookup(r) for r in revs]
555 self._heads = [self.repo.lookup(r) for r in revs]
555 else:
556 else:
556 self._heads = self.repo.heads()
557 self._heads = self.repo.heads()
557 else:
558 else:
558 if revs or startnode is not None:
559 if revs or startnode is not None:
559 raise error.Abort(
560 raise error.Abort(
560 _(
561 _(
561 b'hg.revs cannot be combined with '
562 b'hg.revs cannot be combined with '
562 b'hg.startrev or --rev'
563 b'hg.startrev or --rev'
563 )
564 )
564 )
565 )
565 nodes = set()
566 nodes = set()
566 parents = set()
567 parents = set()
567 for r in logcmdutil.revrange(self.repo, [hgrevs]):
568 for r in logcmdutil.revrange(self.repo, [hgrevs]):
568 ctx = self.repo[r]
569 ctx = self.repo[r]
569 nodes.add(ctx.node())
570 nodes.add(ctx.node())
570 parents.update(p.node() for p in ctx.parents())
571 parents.update(p.node() for p in ctx.parents())
571 self.keep = nodes.__contains__
572 self.keep = nodes.__contains__
572 self._heads = nodes - parents
573 self._heads = nodes - parents
573
574
574 def _changectx(self, rev):
575 def _changectx(self, rev):
575 if self.lastrev != rev:
576 if self.lastrev != rev:
576 self.lastctx = self.repo[rev]
577 self.lastctx = self.repo[rev]
577 self.lastrev = rev
578 self.lastrev = rev
578 return self.lastctx
579 return self.lastctx
579
580
580 def _parents(self, ctx):
581 def _parents(self, ctx):
581 return [p for p in ctx.parents() if p and self.keep(p.node())]
582 return [p for p in ctx.parents() if p and self.keep(p.node())]
582
583
583 def getheads(self):
584 def getheads(self):
584 return [hex(h) for h in self._heads if self.keep(h)]
585 return [hex(h) for h in self._heads if self.keep(h)]
585
586
586 def getfile(self, name, rev):
587 def getfile(self, name, rev):
587 try:
588 try:
588 fctx = self._changectx(rev)[name]
589 fctx = self._changectx(rev)[name]
589 return fctx.data(), fctx.flags()
590 return fctx.data(), fctx.flags()
590 except error.LookupError:
591 except error.LookupError:
591 return None, None
592 return None, None
592
593
593 def _changedfiles(self, ctx1, ctx2):
594 def _changedfiles(self, ctx1, ctx2):
594 ma, r = [], []
595 ma, r = [], []
595 maappend = ma.append
596 maappend = ma.append
596 rappend = r.append
597 rappend = r.append
597 d = ctx1.manifest().diff(ctx2.manifest())
598 d = ctx1.manifest().diff(ctx2.manifest())
598 for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
599 for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
599 if node2 is None:
600 if node2 is None:
600 rappend(f)
601 rappend(f)
601 else:
602 else:
602 maappend(f)
603 maappend(f)
603 return ma, r
604 return ma, r
604
605
605 def getchanges(self, rev, full):
606 def getchanges(self, rev, full):
606 ctx = self._changectx(rev)
607 ctx = self._changectx(rev)
607 parents = self._parents(ctx)
608 parents = self._parents(ctx)
608 if full or not parents:
609 if full or not parents:
609 files = copyfiles = ctx.manifest()
610 files = copyfiles = ctx.manifest()
610 if parents:
611 if parents:
611 if self._changescache[0] == rev:
612 if self._changescache[0] == rev:
612 ma, r = self._changescache[1]
613 ma, r = self._changescache[1]
613 else:
614 else:
614 ma, r = self._changedfiles(parents[0], ctx)
615 ma, r = self._changedfiles(parents[0], ctx)
615 if not full:
616 if not full:
616 files = ma + r
617 files = ma + r
617 copyfiles = ma
618 copyfiles = ma
618 # _getcopies() is also run for roots and before filtering so missing
619 # _getcopies() is also run for roots and before filtering so missing
619 # revlogs are detected early
620 # revlogs are detected early
620 copies = self._getcopies(ctx, parents, copyfiles)
621 copies = self._getcopies(ctx, parents, copyfiles)
621 cleanp2 = set()
622 cleanp2 = set()
622 if len(parents) == 2:
623 if len(parents) == 2:
623 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
624 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
624 for f, value in pycompat.iteritems(d):
625 for f, value in pycompat.iteritems(d):
625 if value is None:
626 if value is None:
626 cleanp2.add(f)
627 cleanp2.add(f)
627 changes = [(f, rev) for f in files if f not in self.ignored]
628 changes = [(f, rev) for f in files if f not in self.ignored]
628 changes.sort()
629 changes.sort()
629 return changes, copies, cleanp2
630 return changes, copies, cleanp2
630
631
631 def _getcopies(self, ctx, parents, files):
632 def _getcopies(self, ctx, parents, files):
632 copies = {}
633 copies = {}
633 for name in files:
634 for name in files:
634 if name in self.ignored:
635 if name in self.ignored:
635 continue
636 continue
636 try:
637 try:
637 copysource = ctx.filectx(name).copysource()
638 copysource = ctx.filectx(name).copysource()
638 if copysource in self.ignored:
639 if copysource in self.ignored:
639 continue
640 continue
640 # Ignore copy sources not in parent revisions
641 # Ignore copy sources not in parent revisions
641 if not any(copysource in p for p in parents):
642 if not any(copysource in p for p in parents):
642 continue
643 continue
643 copies[name] = copysource
644 copies[name] = copysource
644 except TypeError:
645 except TypeError:
645 pass
646 pass
646 except error.LookupError as e:
647 except error.LookupError as e:
647 if not self.ignoreerrors:
648 if not self.ignoreerrors:
648 raise
649 raise
649 self.ignored.add(name)
650 self.ignored.add(name)
650 self.ui.warn(_(b'ignoring: %s\n') % e)
651 self.ui.warn(_(b'ignoring: %s\n') % e)
651 return copies
652 return copies
652
653
653 def getcommit(self, rev):
654 def getcommit(self, rev):
654 ctx = self._changectx(rev)
655 ctx = self._changectx(rev)
655 _parents = self._parents(ctx)
656 _parents = self._parents(ctx)
656 parents = [p.hex() for p in _parents]
657 parents = [p.hex() for p in _parents]
657 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
658 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
658 crev = rev
659 crev = rev
659
660
660 return common.commit(
661 return common.commit(
661 author=ctx.user(),
662 author=ctx.user(),
662 date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
663 date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
663 desc=ctx.description(),
664 desc=ctx.description(),
664 rev=crev,
665 rev=crev,
665 parents=parents,
666 parents=parents,
666 optparents=optparents,
667 optparents=optparents,
667 branch=ctx.branch(),
668 branch=ctx.branch(),
668 extra=ctx.extra(),
669 extra=ctx.extra(),
669 sortkey=ctx.rev(),
670 sortkey=ctx.rev(),
670 saverev=self.saverev,
671 saverev=self.saverev,
671 phase=ctx.phase(),
672 phase=ctx.phase(),
672 ctx=ctx,
673 ctx=ctx,
673 )
674 )
674
675
675 def numcommits(self):
676 def numcommits(self):
676 return len(self.repo)
677 return len(self.repo)
677
678
678 def gettags(self):
679 def gettags(self):
679 # This will get written to .hgtags, so filter non-global tags out.
680 # This will get written to .hgtags, so filter non-global tags out.
680 tags = [
681 tags = [
681 t
682 t
682 for t in self.repo.tagslist()
683 for t in self.repo.tagslist()
683 if self.repo.tagtype(t[0]) == b'global'
684 if self.repo.tagtype(t[0]) == b'global'
684 ]
685 ]
685 return {name: hex(node) for name, node in tags if self.keep(node)}
686 return {name: hex(node) for name, node in tags if self.keep(node)}
686
687
687 def getchangedfiles(self, rev, i):
688 def getchangedfiles(self, rev, i):
688 ctx = self._changectx(rev)
689 ctx = self._changectx(rev)
689 parents = self._parents(ctx)
690 parents = self._parents(ctx)
690 if not parents and i is None:
691 if not parents and i is None:
691 i = 0
692 i = 0
692 ma, r = ctx.manifest().keys(), []
693 ma, r = ctx.manifest().keys(), []
693 else:
694 else:
694 i = i or 0
695 i = i or 0
695 ma, r = self._changedfiles(parents[i], ctx)
696 ma, r = self._changedfiles(parents[i], ctx)
696 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
697 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
697
698
698 if i == 0:
699 if i == 0:
699 self._changescache = (rev, (ma, r))
700 self._changescache = (rev, (ma, r))
700
701
701 return ma + r
702 return ma + r
702
703
703 def converted(self, rev, destrev):
704 def converted(self, rev, destrev):
704 if self.convertfp is None:
705 if self.convertfp is None:
705 self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
706 self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
706 self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
707 self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
707 self.convertfp.flush()
708 self.convertfp.flush()
708
709
709 def before(self):
710 def before(self):
710 self.ui.debug(b'run hg source pre-conversion action\n')
711 self.ui.debug(b'run hg source pre-conversion action\n')
711
712
712 def after(self):
713 def after(self):
713 self.ui.debug(b'run hg source post-conversion action\n')
714 self.ui.debug(b'run hg source post-conversion action\n')
714
715
715 def hasnativeorder(self):
716 def hasnativeorder(self):
716 return True
717 return True
717
718
718 def hasnativeclose(self):
719 def hasnativeclose(self):
719 return True
720 return True
720
721
721 def lookuprev(self, rev):
722 def lookuprev(self, rev):
722 try:
723 try:
723 return hex(self.repo.lookup(rev))
724 return hex(self.repo.lookup(rev))
724 except (error.RepoError, error.LookupError):
725 except (error.RepoError, error.LookupError):
725 return None
726 return None
726
727
727 def getbookmarks(self):
728 def getbookmarks(self):
728 return bookmarks.listbookmarks(self.repo)
729 return bookmarks.listbookmarks(self.repo)
729
730
730 def checkrevformat(self, revstr, mapname=b'splicemap'):
731 def checkrevformat(self, revstr, mapname=b'splicemap'):
731 """Mercurial, revision string is a 40 byte hex"""
732 """Mercurial, revision string is a 40 byte hex"""
732 self.checkhexformat(revstr, mapname)
733 self.checkhexformat(revstr, mapname)
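The shamap file read and written above is plain text with one '<destination-rev> <source-rev>' pair per line, as emitted by converted(). A sketch of loading one back into a source-to-destination mapping (helper name is illustrative):

def read_shamap(path):
    mapping = {}
    with open(path, 'rb') as fp:
        for line in fp:
            parts = line.split()
            if len(parts) == 2:
                dest, src = parts
                mapping[src] = dest
    return mapping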
@@ -1,971 +1,958 @@
# fix - rewrite file content in changesets and working copy
#
# Copyright 2018 Google LLC.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""rewrite file content in changesets or working copy (EXPERIMENTAL)

Provides a command that runs configured tools on the contents of modified files,
writing back any fixes to the working copy or replacing changesets.

Here is an example configuration that causes :hg:`fix` to apply automatic
formatting fixes to modified lines in C++ code::

  [fix]
  clang-format:command=clang-format --assume-filename={rootpath}
  clang-format:linerange=--lines={first}:{last}
  clang-format:pattern=set:**.cpp or **.hpp

The :command suboption forms the first part of the shell command that will be
used to fix a file. The content of the file is passed on standard input, and the
fixed file content is expected on standard output. Any output on standard error
will be displayed as a warning. If the exit status is not zero, the file will
not be affected. A placeholder warning is displayed if there is a non-zero exit
status but no standard error output. Some values may be substituted into the
command::

  {rootpath}  The path of the file being fixed, relative to the repo root
  {basename}  The name of the file being fixed, without the directory path

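As a further illustration (hypothetical fixer name, not part of this
extension), a tool that strips trailing whitespace from modified Python files
needs only the :command and :pattern suboptions::

  [fix]
  strip-ws:command = python3 -c "import sys; sys.stdout.write(''.join(l.rstrip() + '\n' for l in sys.stdin))"
  strip-ws:pattern = set:**.py
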
If the :linerange suboption is set, the tool will only be run if there are
changed lines in a file. The value of this suboption is appended to the shell
command once for every range of changed lines in the file. Some values may be
substituted into the command::

  {first}   The 1-based line number of the first line in the modified range
  {last}    The 1-based line number of the last line in the modified range

Deleted sections of a file will be ignored by :linerange, because there is no
corresponding line range in the version being fixed.

By default, tools that set :linerange will only be executed if there is at least
one changed line range. This is meant to prevent accidents like running a code
formatter in such a way that it unexpectedly reformats the whole file. If such a
tool needs to operate on unchanged files, it should set the :skipclean suboption
to false.

The :pattern suboption determines which files will be passed through each
configured tool. See :hg:`help patterns` for possible values. However, all
patterns are relative to the repo root, even if that text says they are relative
to the current working directory. If there are file arguments to :hg:`fix`, the
intersection of these patterns is used.

There is also a configurable limit for the maximum size of file that will be
processed by :hg:`fix`::

  [fix]
  maxfilesize = 2MB

Normally, execution of configured tools will continue after a failure (indicated
by a non-zero exit status). It can also be configured to abort after the first
such failure, so that no files will be affected if any tool fails. This abort
will also cause :hg:`fix` to exit with a non-zero status::

  [fix]
  failure = abort

When multiple tools are configured to affect a file, they execute in an order
defined by the :priority suboption. The priority suboption has a default value
of zero for each tool. Tools are executed in order of descending priority. The
execution order of tools with equal priority is unspecified. For example, you
could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
in a text file by ensuring that 'sort' runs before 'head'::

  [fix]
  sort:command = sort -n
  head:command = head -n 10
  sort:pattern = numbers.txt
  head:pattern = numbers.txt
  sort:priority = 2
  head:priority = 1

To account for changes made by each tool, the line numbers used for incremental
formatting are recomputed before executing the next tool. So, each tool may see
different values for the arguments added by the :linerange suboption.

Each fixer tool is allowed to return some metadata in addition to the fixed file
content. The metadata must be placed before the file content on stdout,
separated from the file content by a zero byte. The metadata is parsed as a JSON
value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
is expected to produce this metadata encoding if and only if the :metadata
suboption is true::

  [fix]
  tool:command = tool --prepend-json-metadata
  tool:metadata = true

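A minimal sketch of such a tool (hypothetical, shown only to illustrate the
protocol) writes the JSON value, a zero byte, and then the fixed content::

  import json
  import sys

  data = sys.stdin.buffer.read()
  # The "fix" here just strips trailing whitespace from each line.
  fixed = b''.join(l.rstrip() + b'\n' for l in data.splitlines(True))
  sys.stdout.buffer.write(json.dumps({'changed': fixed != data}).encode('utf-8'))
  sys.stdout.buffer.write(b'\0')
  sys.stdout.buffer.write(fixed)
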
The metadata values are passed to hooks, which can be used to print summaries or
perform other post-fixing work. The supported hooks are::

  "postfixfile"
    Run once for each file in each revision where any fixer tools made changes
    to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
    and "$HG_METADATA" with a map of fixer names to metadata values from fixer
    tools that affected the file. Fixer tools that didn't affect the file have a
    value of None. Only fixer tools that executed are present in the metadata.

  "postfix"
    Run once after all files and revisions have been handled. Provides
    "$HG_REPLACEMENTS" with information about what revisions were created and
    made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
    files in the working copy were updated. Provides a list "$HG_METADATA"
    mapping fixer tool names to lists of metadata values returned from
    executions that modified a file. This aggregates the same metadata
    previously passed to the "postfixfile" hook.

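A sketch of an in-process Python hook consuming this metadata (hypothetical
module, registered e.g. as ``hooks.postfixfile = python:myhooks.report``)::

  def report(ui, repo, hooktype, rev=None, path=None, metadata=None, **kwargs):
      # "metadata" maps fixer names to the value each tool returned (or None).
      fixed = [name for name, value in (metadata or {}).items() if value is not None]
      if fixed:
          ui.status(b'%s changed by: %s\n' % (path, b', '.join(sorted(fixed))))
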
Fixer tools are run in the repository's root directory. This allows them to read
configuration files from the working copy, or even write to the working copy.
The working copy is not updated to match the revision being fixed. In fact,
several revisions may be fixed in parallel. Writes to the working copy are not
amended into the revision being fixed; fixer tools should always write fixed
file content back to stdout as documented above.
"""

from __future__ import absolute_import

import collections
import itertools
import os
import re
import subprocess

from mercurial.i18n import _
from mercurial.node import (
    nullid,
    nullrev,
    wdirrev,
)

from mercurial.utils import procutil

from mercurial import (
    cmdutil,
    context,
    copies,
    error,
    logcmdutil,
    match as matchmod,
    mdiff,
    merge,
    mergestate as mergestatemod,
-   obsolete,
    pycompat,
    registrar,
    rewriteutil,
    scmutil,
    util,
    worker,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
    b'command': None,
    b'linerange': None,
    b'pattern': None,
    b'priority': 0,
    b'metadata': False,
    b'skipclean': True,
    b'enabled': True,
}

for key, default in FIXER_ATTRS.items():
    configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem(b'fix', b'maxfilesize', default=b'2MB')

# Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
# This helps users write shell scripts that stop when a fixer tool signals a
# problem.
configitem(b'fix', b'failure', default=b'continue')


def checktoolfailureaction(ui, message, hint=None):
    """Abort with 'message' if fix.failure=abort"""
    action = ui.config(b'fix', b'failure')
    if action not in (b'continue', b'abort'):
        raise error.Abort(
            _(b'unknown fix.failure action: %s') % (action,),
            hint=_(b'use "continue" or "abort"'),
        )
    if action == b'abort':
        raise error.Abort(message, hint=hint)

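# Usage note (mirrors the call sites below): with fix.failure=abort this raises
# error.Abort for the given message; with the default fix.failure=continue it
# returns without doing anything.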

allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
baseopt = (
    b'',
    b'base',
    [],
    _(
        b'revisions to diff against (overrides automatic '
        b'selection, and applies to every revision being '
        b'fixed)'
    ),
    _(b'REV'),
)
revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
sourceopt = (
    b's',
    b'source',
    [],
    _(b'fix the specified revisions and their descendants'),
    _(b'REV'),
)
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')


@command(
    b'fix',
    [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
    usage,
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. Note that no backups are made.

    If revisions are specified with --source, those revisions and their
    descendants will be checked, and they may be replaced with new revisions
    that have fixed file content. By automatically including the descendants,
    no merging, rebasing, or evolution will be required. If an ancestor of the
    working copy is included, then the working copy itself will also be fixed,
    and the working copy will be updated to the fixed parent.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
    cmdutil.check_incompatible_arguments(
        opts, b'working_dir', [b'all', b'source']
    )

    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        basepaths = getbasepaths(repo, opts, workqueue, basectxs)
        fixers = getfixers(ui)

        # Rather than letting each worker independently fetch the files
        # (which also would add complications for shared/keepalive
        # connections), prefetch them all first.
        _prefetchfiles(repo, workqueue, basepaths)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for srcrev, path, dstrevs in items:
                ctx = repo[srcrev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui,
                    repo,
                    opts,
                    fixers,
                    ctx,
                    path,
                    basepaths,
                    basectxs[srcrev],
                )
                # We ungroup the work items now, because the code that consumes
                # these results has to handle each dstrev separately, and in
                # topological order. Because these are handled in topological
                # order, it's important that we pass around references to
                # "newdata" instead of copying it. Otherwise, we would be
                # keeping more copies of file content in memory at a time than
                # if we hadn't bothered to group/deduplicate the work items.
                data = newdata if newdata != olddata else None
                for dstrev in dstrevs:
                    yield (dstrev, path, metadata, data)

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))


def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes().

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements".

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    replacements = {
        prec: [succ] for prec, succ in pycompat.iteritems(replacements)
    }
    scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)


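# Illustration (hypothetical node ids): cleanup() receives replacements shaped
# like {oldnode: newnode} and rewrites it to {oldnode: [newnode]}, the
# one-to-many mapping that scmutil.cleanupnodes() expects.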
def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs a list of files to fix and which revisions each fix applies to

    To avoid duplicating work, there is usually only one work item for each file
    revision that might need to be fixed. There can be multiple work items per
    file revision if the same file needs to be fixed in multiple changesets with
    different baserevs. Each work item also contains a list of changesets where
    the file's data should be replaced with the fixed data. The work items for
    earlier changesets come earlier in the work queue, to improve pipelining by
    allowing the first changeset to be replaced while fixes are still being
    computed for later changesets.

    Also returned is a map from changesets to the count of work items that might
    affect each changeset. This is used later to count when all of a changeset's
    work items have been finished, without having to inspect the remaining work
    queue in each worker subprocess.

    The example work item (1, "foo/bar.txt", (1, 2, 3)) means that the data of
    bar.txt should be read from revision 1, then fixed, and written back to
    revisions 1, 2 and 3. Revision 1 is called the "srcrev" and the list of
    revisions is called the "dstrevs". In practice the srcrev is always one of
    the dstrevs, and we make that choice when constructing the work item so that
    the choice can't be made inconsistently later on. The dstrevs should all
    have the same file revision for the given path, so the choice of srcrev is
    arbitrary. The wdirrev can be a dstrev and a srcrev.
    """
    dstrevmap = collections.defaultdict(list)
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in sorted(
            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
        ):
            fctx = fixctx[path]
            if fctx.islink():
                continue
            if fctx.size() > maxfilesize:
                ui.warn(
                    _(b'ignoring file larger than %s: %s\n')
                    % (util.bytecount(maxfilesize), path)
                )
                continue
            baserevs = tuple(ctx.rev() for ctx in basectxs[rev])
            dstrevmap[(fctx.filerev(), baserevs, path)].append(rev)
            numitems[rev] += 1
    workqueue = [
        (min(dstrevs), path, dstrevs)
        for (_filerev, _baserevs, path), dstrevs in dstrevmap.items()
    ]
    # Move work items for earlier changesets to the front of the queue, so we
    # might be able to replace those changesets (in topological order) while
    # we're still processing later work items. Note the min() in the previous
    # expression, which means we don't need a custom comparator here. The path
    # is also important in the sort order to make the output order stable. There
    # are some situations where this doesn't help much, but some situations
    # where it lets us buffer O(1) files instead of O(n) files.
    workqueue.sort()
    return workqueue, numitems

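# Illustration (hypothetical revisions): if foo/bar.txt has the same file
# revision and the same baserevs in changesets 1, 2 and 3, a single work item
# (1, b'foo/bar.txt', [1, 2, 3]) is queued, and numitems records one pending
# item for each of the three changesets.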

def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed"""
    if opts[b'all']:
        revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
    elif opts[b'source']:
        source_revs = logcmdutil.revrange(repo, opts[b'source'])
        revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
        if wdirrev in source_revs:
            # `wdir()::` is currently empty, so manually add wdir
            revs.add(wdirrev)
        if repo[b'.'].rev() in revs:
            revs.add(wdirrev)
    else:
        revs = set(logcmdutil.revrange(repo, opts[b'rev']))
        if opts.get(b'working_dir'):
            revs.add(wdirrev)
-   for rev in revs:
-       checkfixablectx(ui, repo, repo[rev])
    # Allow fixing only wdir() even if there's an unfinished operation
    if not (len(revs) == 1 and wdirrev in revs):
        cmdutil.checkunfinished(repo)
        rewriteutil.precheck(repo, revs, b'fix')
    if (
        wdirrev in revs
        and mergestatemod.mergestate.read(repo).unresolvedcount()
    ):
        raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --source or --working-dir'
        )
    return revs


-def checkfixablectx(ui, repo, ctx):
-    """Aborts if the revision shouldn't be replaced with a fixed one."""
-    if ctx.obsolete():
-        # It would be better to actually check if the revision has a successor.
-        if not obsolete.isenabled(repo, obsolete.allowdivergenceopt):
-            raise error.Abort(
-                b'fixing obsolete revision could cause divergence'
-            )
-
-
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    files = set()
    for basectx in basectxs:
        stat = basectx.status(
            fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
        )
        files.update(
            set(
                itertools.chain(
                    stat.added, stat.modified, stat.clean, stat.unknown
                )
            )
        )
    return files


def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Return a range containing all lines. Rely on the diff implementation's
        # idea of how many lines are in the file, instead of reimplementing it.
        return difflineranges(b'', content2)

    rangeslist = []
    for basectx in basectxs:
        basepath = basepaths.get((basectx.rev(), fixctx.rev(), path), path)

        if basepath in basectx:
            content1 = basectx[basepath].data()
        else:
            content1 = b''
        rangeslist.extend(difflineranges(content1, content2))
    return unionranges(rangeslist)


def getbasepaths(repo, opts, workqueue, basectxs):
    if opts.get(b'whole'):
        # Base paths will never be fetched for line range determination.
        return {}

    basepaths = {}
    for srcrev, path, _dstrevs in workqueue:
        fixctx = repo[srcrev]
        for basectx in basectxs[srcrev]:
            basepath = copies.pathcopies(basectx, fixctx).get(path, path)
            if basepath in basectx:
                basepaths[(basectx.rev(), fixctx.rev(), path)] = basepath
    return basepaths


def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    rangeslist = sorted(set(rangeslist))
    unioned = []
    if rangeslist:
        unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
    for a, b in rangeslist:
        c, d = unioned[-1]
        if a > d + 1:
            unioned.append((a, b))
        else:
            unioned[-1] = (c, max(b, d))
    return unioned


def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    ranges = []
    for lines, kind in mdiff.allblocks(content1, content2):
        firstline, lastline = lines[2:4]
        if kind == b'!' and firstline != lastline:
            ranges.append((firstline + 1, lastline))
    return ranges


def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(logcmdutil.revrange(repo, opts.get(b'base')))
        if not baserevs:
            baserevs = {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        ctx = repo[rev]
        for pctx in ctx.parents():
            if pctx.rev() in basectxs:
                basectxs[rev].update(basectxs[pctx.rev()])
            else:
                basectxs[rev].add(pctx)
    return basectxs

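# Illustration (hypothetical linear history 0 <- 1 <- 2, fixing {1, 2} without
# --base): basectxs[1] = {repo[0]} because parent 0 is outside the fixed set,
# and basectxs[2] inherits {repo[0]} because its parent 1 is itself being
# fixed, so changed lines in revision 2 are measured against revision 0.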
def _prefetchfiles(repo, workqueue, basepaths):
    toprefetch = set()

    # Prefetch the files that will be fixed.
    for srcrev, path, _dstrevs in workqueue:
        if srcrev == wdirrev:
            continue
        toprefetch.add((srcrev, path))

    # Prefetch the base contents for lineranges().
    for (baserev, fixrev, path), basepath in basepaths.items():
        toprefetch.add((baserev, basepath))

    if toprefetch:
        scmutil.prefetchfiles(
            repo,
            [
                (rev, scmutil.matchfiles(repo, [path]))
                for rev, path in toprefetch
            ],
        )


def fixfile(ui, repo, opts, fixers, fixctx, path, basepaths, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some order
    starting with the file's content in the fixctx. Fixers that support line
    ranges will affect lines that have changed relative to any of the basectxs
    (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the repository's
    root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    for fixername, fixer in pycompat.iteritems(fixers):
        if fixer.affects(opts, fixctx, path):
            ranges = lineranges(
                opts, path, basepaths, basectxs, fixctx, newdata
            )
            command = fixer.command(ui, path, ranges)
            if command is None:
                continue
            ui.debug(b'subprocess: %s\n' % (command,))
            proc = subprocess.Popen(
                procutil.tonativestr(command),
                shell=True,
                cwd=procutil.tonativestr(repo.root),
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = proc.communicate(newdata)
            if stderr:
                showstderr(ui, fixctx.rev(), fixername, stderr)
            newerdata = stdout
            if fixer.shouldoutputmetadata():
                try:
                    metadatajson, newerdata = stdout.split(b'\0', 1)
                    metadata[fixername] = pycompat.json_loads(metadatajson)
                except ValueError:
                    ui.warn(
                        _(b'ignored invalid output from fixer tool: %s\n')
                        % (fixername,)
                    )
                    continue
            else:
                metadata[fixername] = None
            if proc.returncode == 0:
                newdata = newerdata
            else:
                if not stderr:
                    message = _(b'exited with status %d\n') % (proc.returncode,)
                    showstderr(ui, fixctx.rev(), fixername, message)
                checktoolfailureaction(
                    ui,
                    _(b'no fixes will be applied'),
                    hint=_(
                        b'use --config fix.failure=continue to apply any '
                        b'successful fixes anyway'
                    ),
                )
    return metadata, newdata


def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line of
    the error message. Doesn't include file names, since those take up a lot of
    space and would tend to be included in the error message if they were
    relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if line:
            ui.warn(b'[')
            if rev is None:
                ui.warn(_(b'wdir'), label=b'evolve.rev')
            else:
                ui.warn(b'%d' % rev, label=b'evolve.rev')
            ui.warn(b'] %s: %s\n' % (fixername, line))

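# Example of the resulting warning (hypothetical fixer named "sort" failing on
# revision 2): "[2] sort: exited with status 1". When fixing the working
# directory, the bracketed revision reads "[wdir]" instead.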

def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())

    oldp1 = repo.dirstate.p1()
    newp1 = replacements.get(oldp1, oldp1)
    if newp1 != oldp1:
        assert repo.dirstate.p2() == nullid
        with repo.dirstate.parentchange():
            scmutil.movedirstate(repo, repo[newp1])


def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All other
    paths will be recreated from the original revision without changes.
    "filedata" may contain paths that didn't exist in the original revision;
    they will be added.

    "replacements" is a dict that maps a single node to a single node, and it is
    updated to indicate the original revision is replaced by the newly created
    one. No entry is added if the replacement's node already exists.

    The new revision has the same parents as the old one, unless those parents
    have already been replaced, in which case those replacements are the parents
    of this new revision. Thus, if revisions are replaced in topological order,
    there is no need to rebase them into the original topology later.
    """

    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the original,
    # but we should if the original revision's parent has been replaced.
    # Otherwise, we would produce an orphan that needs no actual human
    # intervention to evolve. We can't rely on commit() to avoid creating the
814 # intervention to evolve. We can't rely on commit() to avoid creating the
828 # un-needed revision because the extra field added below produces a new hash
815 # un-needed revision because the extra field added below produces a new hash
829 # regardless of file content changes.
816 # regardless of file content changes.
830 if (
817 if (
831 not filedata
818 not filedata
832 and p1ctx.node() not in replacements
819 and p1ctx.node() not in replacements
833 and p2ctx.node() not in replacements
820 and p2ctx.node() not in replacements
834 ):
821 ):
835 return
822 return
836
823
837 extra = ctx.extra().copy()
824 extra = ctx.extra().copy()
838 extra[b'fix_source'] = ctx.hex()
825 extra[b'fix_source'] = ctx.hex()
839
826
840 wctx = context.overlayworkingctx(repo)
827 wctx = context.overlayworkingctx(repo)
841 wctx.setbase(repo[newp1node])
828 wctx.setbase(repo[newp1node])
842 merge.revert_to(ctx, wc=wctx)
829 merge.revert_to(ctx, wc=wctx)
843 copies.graftcopies(wctx, ctx, ctx.p1())
830 copies.graftcopies(wctx, ctx, ctx.p1())
844
831
845 for path in filedata.keys():
832 for path in filedata.keys():
846 fctx = ctx[path]
833 fctx = ctx[path]
847 copysource = fctx.copysource()
834 copysource = fctx.copysource()
848 wctx.write(path, filedata[path], flags=fctx.flags())
835 wctx.write(path, filedata[path], flags=fctx.flags())
849 if copysource:
836 if copysource:
850 wctx.markcopied(path, copysource)
837 wctx.markcopied(path, copysource)
851
838
852 desc = rewriteutil.update_hash_refs(
839 desc = rewriteutil.update_hash_refs(
853 repo,
840 repo,
854 ctx.description(),
841 ctx.description(),
855 {oldnode: [newnode] for oldnode, newnode in replacements.items()},
842 {oldnode: [newnode] for oldnode, newnode in replacements.items()},
856 )
843 )
857
844
858 memctx = wctx.tomemctx(
845 memctx = wctx.tomemctx(
859 text=desc,
846 text=desc,
860 branch=ctx.branch(),
847 branch=ctx.branch(),
861 extra=extra,
848 extra=extra,
862 date=ctx.date(),
849 date=ctx.date(),
863 parents=(newp1node, newp2node),
850 parents=(newp1node, newp2node),
864 user=ctx.user(),
851 user=ctx.user(),
865 )
852 )
866
853
867 sucnode = memctx.commit()
854 sucnode = memctx.commit()
868 prenode = ctx.node()
855 prenode = ctx.node()
869 if prenode == sucnode:
856 if prenode == sucnode:
870 ui.debug(b'node %s already existed\n' % (ctx.hex()))
857 ui.debug(b'node %s already existed\n' % (ctx.hex()))
871 else:
858 else:
872 replacements[ctx.node()] = sucnode
859 replacements[ctx.node()] = sucnode
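
# A sketch of the topological-ordering property described in the docstring,
# with hypothetical nodes: if replacements already maps old1 -> new1 and old2
# is a child of old1, fixing old2 looks up old1 in replacements and parents
# the rewritten commit on new1 directly, so no later rebase is needed:
#
#   old1 --- old2      becomes      new1 --- new2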


def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of the
    fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let it
        # affect no files. There is no reasonable subset of files to use as the
        # default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
        elif pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
        elif not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
        else:
            fixers[name] = Fixer(
                command, pattern, linerange, priority, metadata, skipclean
            )
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
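
# As a sketch of the suboptions read above, an hgrc entry like the following
# (the tool and its flags are illustrative, matching the example in this
# extension's help text) defines one fixer named "clang-format":
#
#   [fix]
#   clang-format:command = clang-format --assume-filename={rootpath}
#   clang-format:linerange = --lines={first}:{last}
#   clang-format:pattern = set:**.cpp or **.hpp
#   clang-format:priority = 10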


def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    names = set()
    for k, v in ui.configitems(b'fix'):
        if b':' in k:
            names.add(k.split(b':', 1)[0])
    return names


class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        repo = fixctx.repo()
        matcher = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return matcher(path)

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        parts = [
            expand(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # No line ranges to fix, so don't run the fixer.
                return None
            for first, last in ranges:
                parts.append(
                    expand(
                        ui, self._linerange, {b'first': first, b'last': last}
                    )
                )
        return b' '.join(parts)
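
    # A sketch of the expansion done by command() above: with the hgrc example
    # near getfixers() and dirty ranges [(1, 10), (40, 47)] for a hypothetical
    # path sub/file.cpp, the rendered command is roughly
    #   clang-format --assume-filename=sub/file.cpp --lines=1:10 --lines=40:47
    # while skipclean with no dirty ranges returns None and skips the tool.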
@@ -1,343 +1,344 @@
1 """grant Mercurial the ability to operate on Git repositories. (EXPERIMENTAL)
1 """grant Mercurial the ability to operate on Git repositories. (EXPERIMENTAL)
2
2
3 This is currently super experimental. It probably will consume your
3 This is currently super experimental. It probably will consume your
4 firstborn a la Rumpelstiltskin, etc.
4 firstborn a la Rumpelstiltskin, etc.
5 """
5 """
6
6
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import os
9 import os
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 from mercurial import (
13 from mercurial import (
14 commands,
14 commands,
15 error,
15 error,
16 extensions,
16 extensions,
17 localrepo,
17 localrepo,
18 pycompat,
18 pycompat,
19 registrar,
19 registrar,
20 scmutil,
20 scmutil,
21 store,
21 store,
22 util,
22 util,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 dirstate,
26 dirstate,
27 gitlog,
27 gitlog,
28 gitutil,
28 gitutil,
29 index,
29 index,
30 )
30 )
31
31
32 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
32 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
33 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
33 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
34 # be specifying the version(s) of Mercurial they are tested with, or
34 # be specifying the version(s) of Mercurial they are tested with, or
35 # leave the attribute unspecified.
35 # leave the attribute unspecified.
36 testedwith = b'ships-with-hg-core'
36 testedwith = b'ships-with-hg-core'
37
37
38 configtable = {}
38 configtable = {}
39 configitem = registrar.configitem(configtable)
39 configitem = registrar.configitem(configtable)
40 # git.log-index-cache-miss: internal knob for testing
40 # git.log-index-cache-miss: internal knob for testing
41 configitem(
41 configitem(
42 b"git",
42 b"git",
43 b"log-index-cache-miss",
43 b"log-index-cache-miss",
44 default=False,
44 default=False,
45 )
45 )
46
46
47 getversion = gitutil.pygit2_version
47 getversion = gitutil.pygit2_version
48
48
49
49
50 # TODO: extract an interface for this in core
50 # TODO: extract an interface for this in core
51 class gitstore(object): # store.basicstore):
51 class gitstore(object): # store.basicstore):
52 def __init__(self, path, vfstype):
52 def __init__(self, path, vfstype):
53 self.vfs = vfstype(path)
53 self.vfs = vfstype(path)
54 self.opener = self.vfs
54 self.path = self.vfs.base
55 self.path = self.vfs.base
55 self.createmode = store._calcmode(self.vfs)
56 self.createmode = store._calcmode(self.vfs)
56 # above lines should go away in favor of:
57 # above lines should go away in favor of:
57 # super(gitstore, self).__init__(path, vfstype)
58 # super(gitstore, self).__init__(path, vfstype)
58
59
59 self.git = gitutil.get_pygit2().Repository(
60 self.git = gitutil.get_pygit2().Repository(
60 os.path.normpath(os.path.join(path, b'..', b'.git'))
61 os.path.normpath(os.path.join(path, b'..', b'.git'))
61 )
62 )
62 self._progress_factory = lambda *args, **kwargs: None
63 self._progress_factory = lambda *args, **kwargs: None
63 self._logfn = lambda x: None
64 self._logfn = lambda x: None
64
65
65 @util.propertycache
66 @util.propertycache
66 def _db(self):
67 def _db(self):
67 # We lazy-create the database because we want to thread a
68 # We lazy-create the database because we want to thread a
68 # progress callback down to the indexing process if it's
69 # progress callback down to the indexing process if it's
69 # required, and we don't have a ui handle in makestore().
70 # required, and we don't have a ui handle in makestore().
70 return index.get_index(self.git, self._logfn, self._progress_factory)
71 return index.get_index(self.git, self._logfn, self._progress_factory)
71
72
72 def join(self, f):
73 def join(self, f):
73 """Fake store.join method for git repositories.
74 """Fake store.join method for git repositories.
74
75
75 For the most part, store.join is used for @storecache
76 For the most part, store.join is used for @storecache
76 decorators to invalidate caches when various files
77 decorators to invalidate caches when various files
77 change. We'll map the ones we care about, and ignore the rest.
78 change. We'll map the ones we care about, and ignore the rest.
78 """
79 """
79 if f in (b'00changelog.i', b'00manifest.i'):
80 if f in (b'00changelog.i', b'00manifest.i'):
80 # This is close enough: in order for the changelog cache
81 # This is close enough: in order for the changelog cache
81 # to be invalidated, HEAD will have to change.
82 # to be invalidated, HEAD will have to change.
82 return os.path.join(self.path, b'HEAD')
83 return os.path.join(self.path, b'HEAD')
83 elif f == b'lock':
84 elif f == b'lock':
84 # TODO: we probably want to map this to a git lock, I
85 # TODO: we probably want to map this to a git lock, I
85 # suspect index.lock. We should figure out what the
86 # suspect index.lock. We should figure out what the
86 # most-alike file is in git-land. For now we're risking
87 # most-alike file is in git-land. For now we're risking
87 # bad concurrency errors if another git client is used.
88 # bad concurrency errors if another git client is used.
88 return os.path.join(self.path, b'hgit-bogus-lock')
89 return os.path.join(self.path, b'hgit-bogus-lock')
89 elif f in (b'obsstore', b'phaseroots', b'narrowspec', b'bookmarks'):
90 elif f in (b'obsstore', b'phaseroots', b'narrowspec', b'bookmarks'):
90 return os.path.join(self.path, b'..', b'.hg', f)
91 return os.path.join(self.path, b'..', b'.hg', f)
91 raise NotImplementedError(b'Need to pick file for %s.' % f)
92 raise NotImplementedError(b'Need to pick file for %s.' % f)
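
    # For example, join(b'00changelog.i') resolves to the git HEAD file, so
    # @storecache invalidation of the changelog piggybacks on HEAD changes;
    # any name not mapped above fails loudly rather than silently miscaching.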

    def changelog(self, trypending, concurrencychecker):
        # TODO we don't have a plan for trypending in hg's git support yet
        return gitlog.changelog(self.git, self._db)

    def manifestlog(self, repo, storenarrowmatch):
        # TODO handle storenarrowmatch and figure out if we need the repo arg
        return gitlog.manifestlog(self.git, self._db)

    def invalidatecaches(self):
        pass

    def write(self, tr=None):
        # normally this handles things like fncache writes, which we don't have
        pass


def _makestore(orig, requirements, storebasepath, vfstype):
    if b'git' in requirements:
        if not os.path.exists(os.path.join(storebasepath, b'..', b'.git')):
            raise error.Abort(
                _(
                    b'repository specified git format in '
                    b'.hg/requires but has no .git directory'
                )
            )
        # Check for presence of pygit2 only here. The assumption is that we'll
        # run this code iff we'll later need pygit2.
        if gitutil.get_pygit2() is None:
            raise error.Abort(
                _(
                    b'the git extension requires the Python '
                    b'pygit2 library to be installed'
                )
            )

        return gitstore(storebasepath, vfstype)
    return orig(requirements, storebasepath, vfstype)


class gitfilestorage(object):
    def file(self, path):
        if path[0:1] == b'/':
            path = path[1:]
        return gitlog.filelog(self.store.git, self.store._db, path)


def _makefilestorage(orig, requirements, features, **kwargs):
    store = kwargs['store']
    if isinstance(store, gitstore):
        return gitfilestorage
    return orig(requirements, features, **kwargs)


def _setupdothg(ui, path):
    dothg = os.path.join(path, b'.hg')
    if os.path.exists(dothg):
        ui.warn(_(b'git repo already initialized for hg\n'))
    else:
        os.mkdir(os.path.join(path, b'.hg'))
        # TODO is it ok to extend .git/info/exclude like this?
        with open(
            os.path.join(path, b'.git', b'info', b'exclude'), 'ab'
        ) as exclude:
            exclude.write(b'\n.hg\n')
    with open(os.path.join(dothg, b'requires'), 'wb') as f:
        f.write(b'git\n')
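
# The resulting layout, roughly: the existing <repo>/.git is left alone, a
# minimal <repo>/.hg is created whose requires file contains just "git", and
# ".hg" is appended to .git/info/exclude so git ignores Mercurial's metadata.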


_BMS_PREFIX = 'refs/heads/'
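# Bookmarks map 1:1 onto git branch refs: a bookmark named b'main' (an
# illustrative name) is stored as the git ref 'refs/heads/main'. Names are
# fsdecode()d on the way into pygit2 and fsencode()d on the way back out.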


class gitbmstore(object):
    def __init__(self, gitrepo):
        self.gitrepo = gitrepo
        self._aclean = True
        self._active = gitrepo.references['HEAD']  # git head, not mark

    def __contains__(self, name):
        return (
            _BMS_PREFIX + pycompat.fsdecode(name)
        ) in self.gitrepo.references

    def __iter__(self):
        for r in self.gitrepo.listall_references():
            if r.startswith(_BMS_PREFIX):
                yield pycompat.fsencode(r[len(_BMS_PREFIX) :])

    def __getitem__(self, k):
        return (
            self.gitrepo.references[_BMS_PREFIX + pycompat.fsdecode(k)]
            .peel()
            .id.raw
        )

    def get(self, k, default=None):
        try:
            if k in self:
                return self[k]
            return default
        except gitutil.get_pygit2().InvalidSpecError:
            return default

    @property
    def active(self):
        h = self.gitrepo.references['HEAD']
        if not isinstance(h.target, str) or not h.target.startswith(
            _BMS_PREFIX
        ):
            return None
        return pycompat.fsencode(h.target[len(_BMS_PREFIX) :])

    @active.setter
    def active(self, mark):
        githead = mark is not None and (_BMS_PREFIX + mark) or None
        if githead is not None and githead not in self.gitrepo.references:
            raise AssertionError(b'bookmark %s does not exist!' % mark)

        self._active = githead
        self._aclean = False

    def _writeactive(self):
        if self._aclean:
            return
        self.gitrepo.references.create('HEAD', self._active, True)
        self._aclean = True

    def names(self, node):
        r = []
        for ref in self.gitrepo.listall_references():
            if not ref.startswith(_BMS_PREFIX):
                continue
            if self.gitrepo.references[ref].peel().id.raw != node:
                continue
            r.append(pycompat.fsencode(ref[len(_BMS_PREFIX) :]))
        return r

    # Cleanup opportunity: this is *identical* to core's bookmarks store.
    def expandname(self, bname):
        if bname == b'.':
            if self.active:
                return self.active
            raise error.RepoLookupError(_(b"no active bookmark"))
        return bname

    def applychanges(self, repo, tr, changes):
        """Apply a list of changes to bookmarks"""
        # TODO: this should respect transactions, but that's going to
        # require enlarging the gitbmstore to know how to do in-memory
        # temporary writes and read those back prior to transaction
        # finalization.
        for name, node in changes:
            if node is None:
                self.gitrepo.references.delete(
                    _BMS_PREFIX + pycompat.fsdecode(name)
                )
            else:
                self.gitrepo.references.create(
                    _BMS_PREFIX + pycompat.fsdecode(name),
                    gitutil.togitnode(node),
                    force=True,
                )

    def checkconflict(self, mark, force=False, target=None):
        githead = _BMS_PREFIX + mark
        cur = self.gitrepo.references['HEAD']
        if githead in self.gitrepo.references and not force:
            if target:
                if self.gitrepo.references[githead] == target and target == cur:
                    # re-activating a bookmark
                    return []
                # moving a bookmark - forward?
                raise NotImplementedError
            raise error.Abort(
                _(b"bookmark '%s' already exists (use -f to force)") % mark
            )
        if len(mark) > 3 and not force:
            try:
                shadowhash = scmutil.isrevsymbol(self._repo, mark)
            except error.LookupError:  # ambiguous identifier
                shadowhash = False
            if shadowhash:
                self._repo.ui.warn(
                    _(
                        b"bookmark %s matches a changeset hash\n"
                        b"(did you leave a -r out of an 'hg bookmark' "
                        b"command?)\n"
                    )
                    % mark
                )
        return []


def init(orig, ui, dest=b'.', **opts):
    if opts.get('git', False):
        path = util.abspath(dest)
        # TODO: walk up looking for the git repo
        _setupdothg(ui, path)
        return 0
    return orig(ui, dest=dest, **opts)


def reposetup(ui, repo):
    if repo.local() and isinstance(repo.store, gitstore):
        orig = repo.__class__
        repo.store._progress_factory = repo.ui.makeprogress
        if ui.configbool(b'git', b'log-index-cache-miss'):
            repo.store._logfn = repo.ui.warn

        class gitlocalrepo(orig):
            def _makedirstate(self):
                # TODO narrow support here
                return dirstate.gitdirstate(
                    self.ui, self.vfs.base, self.store.git
                )

            def commit(self, *args, **kwargs):
                ret = orig.commit(self, *args, **kwargs)
                if ret is None:
                    # there was nothing to commit, so we should skip
                    # the index fixup logic we'd otherwise do.
                    return None
                tid = self.store.git[gitutil.togitnode(ret)].tree.id
                # DANGER! This will flush any writes staged to the
                # index in Git, but we're sidestepping the index in a
                # way that confuses git when we commit. Alas.
                self.store.git.index.read_tree(tid)
                self.store.git.index.write()
                return ret

            @property
            def _bookmarks(self):
                return gitbmstore(self.store.git)

        repo.__class__ = gitlocalrepo
    return repo


def _featuresetup(ui, supported):
    # don't die on seeing a repo with the git requirement
    supported |= {b'git'}


def extsetup(ui):
    extensions.wrapfunction(localrepo, b'makestore', _makestore)
    extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage)
    # Inject --git flag for `hg init`
    entry = extensions.wrapcommand(commands.table, b'init', init)
    entry[1].extend(
        [(b'', b'git', None, b'set up a git repository instead of hg')]
    )
    localrepo.featuresetupfuncs.add(_featuresetup)
@@ -1,369 +1,369 @@
from __future__ import absolute_import

import contextlib
import errno
import os

from mercurial.node import sha1nodeconstants
from mercurial import (
    error,
    extensions,
    match as matchmod,
    pycompat,
    scmutil,
    util,
)
from mercurial.interfaces import (
    dirstate as intdirstate,
    util as interfaceutil,
)

from . import gitutil

pygit2 = gitutil.get_pygit2()


def readpatternfile(orig, filepath, warn, sourceinfo=False):
    if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
        return orig(filepath, warn, sourceinfo=False)
    result = []
    warnings = []
    with open(filepath, b'rb') as fp:
        for l in fp:
            l = l.strip()
            if not l or l.startswith(b'#'):
                continue
            if l.startswith(b'!'):
                warnings.append(b'unsupported ignore pattern %s' % l)
                continue
            if l.startswith(b'/'):
                result.append(b'rootglob:' + l[1:])
            else:
                result.append(b'relglob:' + l)
    return result, warnings
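
# A sketch of the translation above for a hypothetical .gitignore:
#   *.pyc     ->  b'relglob:*.pyc'   (matches at any directory level)
#   /build    ->  b'rootglob:build'  (anchored at the repository root)
#   !keep.me  ->  dropped, with an "unsupported ignore pattern" warning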


extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)


_STATUS_MAP = {}
if pygit2:
    _STATUS_MAP = {
        pygit2.GIT_STATUS_CONFLICTED: b'm',
        pygit2.GIT_STATUS_CURRENT: b'n',
        pygit2.GIT_STATUS_IGNORED: b'?',
        pygit2.GIT_STATUS_INDEX_DELETED: b'r',
        pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
        pygit2.GIT_STATUS_INDEX_NEW: b'a',
        pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
        pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
        pygit2.GIT_STATUS_WT_DELETED: b'r',
        pygit2.GIT_STATUS_WT_MODIFIED: b'n',
        pygit2.GIT_STATUS_WT_NEW: b'?',
        pygit2.GIT_STATUS_WT_RENAMED: b'a',
        pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
        pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
        pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
    }
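
# The single-letter values above are Mercurial dirstate states: b'n' normal,
# b'a' added, b'r' removed, b'm' needs merging, and b'?' untracked. Note the
# last entry: a file both staged and modified in the working tree maps to b'm'.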


@interfaceutil.implementer(intdirstate.idirstate)
class gitdirstate(object):
    def __init__(self, ui, root, gitrepo):
        self._ui = ui
        self._root = os.path.dirname(root)
        self.git = gitrepo
        self._plchangecallbacks = {}
        # TODO: context.poststatusfixup is bad and uses this attribute
        self._dirty = False

    def p1(self):
        try:
            return self.git.head.peel().id.raw
        except pygit2.GitError:
            # Typically happens when peeling HEAD fails, as in an
            # empty repository.
            return sha1nodeconstants.nullid

    def p2(self):
        # TODO: MERGE_HEAD? something like that, right?
        return sha1nodeconstants.nullid

    def setparents(self, p1, p2=None):
        if p2 is None:
            p2 = sha1nodeconstants.nullid
        assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
        self.git.head.set_target(gitutil.togitnode(p1))

    @util.propertycache
    def identity(self):
        return util.filestat.frompath(
            os.path.join(self._root, b'.git', b'index')
        )

    def branch(self):
        return b'default'

    def parents(self):
        # TODO how on earth do we find p2 if a merge is in flight?
        return self.p1(), sha1nodeconstants.nullid

    def __iter__(self):
        return (pycompat.fsencode(f.path) for f in self.git.index)

    def items(self):
        for ie in self.git.index:
            yield ie.path, None  # value should be a DirstateItem

    # py2,3 compat forward
    iteritems = items

    def __getitem__(self, filename):
        try:
            gs = self.git.status_file(filename)
        except KeyError:
            return b'?'
        return _STATUS_MAP[gs]

    def __contains__(self, filename):
        try:
            gs = self.git.status_file(filename)
            return _STATUS_MAP[gs] != b'?'
        except KeyError:
            return False

    def status(self, match, subrepos, ignored, clean, unknown):
        listclean = clean
        # TODO handling of clean files - can we get that from git.status()?
        modified, added, removed, deleted, unknown, ignored, clean = (
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        )
        gstatus = self.git.status()
        for path, status in gstatus.items():
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            if status == pygit2.GIT_STATUS_IGNORED:
                if path.endswith(b'/'):
                    continue
                ignored.append(path)
            elif status in (
                pygit2.GIT_STATUS_WT_MODIFIED,
                pygit2.GIT_STATUS_INDEX_MODIFIED,
                pygit2.GIT_STATUS_WT_MODIFIED
                | pygit2.GIT_STATUS_INDEX_MODIFIED,
            ):
                modified.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_NEW:
                added.append(path)
            elif status == pygit2.GIT_STATUS_WT_NEW:
                unknown.append(path)
            elif status == pygit2.GIT_STATUS_WT_DELETED:
                deleted.append(path)
            elif status == pygit2.GIT_STATUS_INDEX_DELETED:
                removed.append(path)
            else:
                raise error.Abort(
                    b'unhandled case: status for %r is %r' % (path, status)
                )

        if listclean:
            observed = set(
                modified + added + removed + deleted + unknown + ignored
            )
            index = self.git.index
            index.read()
            for entry in index:
                path = pycompat.fsencode(entry.path)
                if not match(path):
                    continue
                if path in observed:
                    continue  # already in some other set
                if path[-1] == b'/':
                    continue  # directory
                clean.append(path)

        # TODO are we really always sure of status here?
        return (
            False,
            scmutil.status(
                modified, added, removed, deleted, unknown, ignored, clean
            ),
        )

    def flagfunc(self, buildfallback):
        # TODO we can do better
        return buildfallback()

    def getcwd(self):
        # TODO is this a good way to do this?
        return os.path.dirname(
            os.path.dirname(pycompat.fsencode(self.git.path))
        )

    def normalize(self, path):
        normed = util.normcase(path)
        assert normed == path, b"TODO handling of case folding: %s != %s" % (
            normed,
            path,
        )
        return path

    @property
    def _checklink(self):
        return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))

    def copies(self):
        # TODO support copies?
        return {}

    # # TODO what the heck is this
    _filecache = set()

    def pendingparentchange(self):
        # TODO: we need to implement the context manager bits and
        # correctly stage/revert index edits.
        return False

    def write(self, tr):
        # TODO: call parent change callbacks

        if tr:

            def writeinner(category):
                self.git.index.write()

            tr.addpending(b'gitdirstate', writeinner)
        else:
            self.git.index.write()

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        # TODO core dirstate does something about slashes here
        assert isinstance(f, bytes)
        r = util.pathto(self._root, cwd, f)
        return r

    def matches(self, match):
        for x in self.git.index:
            p = pycompat.fsencode(x.path)
            if match(p):
                yield p

    def set_clean(self, f, parentfiledata):
        """Mark a file normal and clean."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def set_possibly_dirty(self, f):
        """Mark a file normal, but possibly dirty."""
        # TODO: for now we just let libgit2 re-stat the file. We can
        # clearly do better.

    def walk(self, match, subrepos, unknown, ignored, full=True):
        # TODO: we need to use .status() and not iterate the index,
        # because the index doesn't force a re-walk and so `hg add` of
        # a new file without an intervening call to status will
        # silently do nothing.
        r = {}
        cwd = self.getcwd()
        for path, status in self.git.status().items():
            if path.startswith('.hg/'):
                continue
            path = pycompat.fsencode(path)
            if not match(path):
                continue
            # TODO construct the stat info from the status object?
            try:
                s = os.stat(os.path.join(cwd, path))
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                continue
            r[path] = s
        return r

    def savebackup(self, tr, backupname):
        # TODO: figure out a strategy for saving index backups.
        pass

    def restorebackup(self, tr, backupname):
        # TODO: figure out a strategy for saving index backups.
        pass

    def set_tracked(self, f):
        uf = pycompat.fsdecode(f)
        if uf in self.git.index:
            return False
        index = self.git.index
        index.read()
        index.add(uf)
        index.write()
        return True

    def add(self, f):
        index = self.git.index
        index.read()
        index.add(pycompat.fsdecode(f))
        index.write()

    def drop(self, f):
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()

    def set_untracked(self, f):
        index = self.git.index
        index.read()
        fs = pycompat.fsdecode(f)
        if fs in index:
            index.remove(fs)
            index.write()
            return True
        return False

    def remove(self, f):
        index = self.git.index
        index.read()
        index.remove(pycompat.fsdecode(f))
        index.write()

    def copied(self, path):
        # TODO: track copies?
        return None

    def prefetch_parents(self):
        # TODO
        pass

    def update_file(self, *args, **kwargs):
        # TODO
        pass

    @contextlib.contextmanager
    def parentchange(self):
        # TODO: track this maybe?
        yield

    def addparentchangecallback(self, category, callback):
        # TODO: should this be added to the dirstate interface?
        self._plchangecallbacks[category] = callback

    def clearbackup(self, tr, backupname):
        # TODO
        pass

    def setbranch(self, branch):
        raise error.Abort(
            b'git repos do not support branches. try using bookmarks'
        )
@@ -1,2678 +1,2690 @@
# histedit.py - interactive history editing for mercurial
#
# Copyright 2009 Augie Fackler <raf@durin42.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""interactive history editing

With this extension installed, Mercurial gains one new command: histedit. Usage
is as follows, assuming the following history::

 @  3[tip]   7c2fd3b9020c   2009-04-27 18:04 -0500   durin42
 |    Add delta
 |
 o  2   030b686bedc4   2009-04-27 18:04 -0500   durin42
 |    Add gamma
 |
 o  1   c561b4e977df   2009-04-27 18:04 -0500   durin42
 |    Add beta
 |
 o  0   d8d2fcd0e319   2009-04-27 18:04 -0500   durin42
      Add alpha

If you were to run ``hg histedit c561b4e977df``, you would see the following
file open in your editor::

 pick c561b4e977df Add beta
 pick 030b686bedc4 Add gamma
 pick 7c2fd3b9020c Add delta

 # Edit history between c561b4e977df and 7c2fd3b9020c
 #
 # Commits are listed from least to most recent
 #
 # Commands:
 #  p, pick = use commit
 #  e, edit = use commit, but allow edits before making new commit
 #  f, fold = use commit, but combine it with the one above
 #  r, roll = like fold, but discard this commit's description and date
 #  d, drop = remove commit from history
 #  m, mess = edit commit message without changing commit content
 #  b, base = checkout changeset and apply further changesets from there
 #

In this file, lines beginning with ``#`` are ignored. You must specify a rule
for each revision in your history. For example, if you had meant to add gamma
before beta, and then wanted to add delta in the same revision as beta, you
would reorganize the file to look like this::

 pick 030b686bedc4 Add gamma
 pick c561b4e977df Add beta
 fold 7c2fd3b9020c Add delta

 # Edit history between c561b4e977df and 7c2fd3b9020c
 #
 # Commits are listed from least to most recent
 #
 # Commands:
 #  p, pick = use commit
 #  e, edit = use commit, but allow edits before making new commit
 #  f, fold = use commit, but combine it with the one above
 #  r, roll = like fold, but discard this commit's description and date
 #  d, drop = remove commit from history
 #  m, mess = edit commit message without changing commit content
 #  b, base = checkout changeset and apply further changesets from there
 #

At which point you close the editor and ``histedit`` starts working. When you
specify a ``fold`` operation, ``histedit`` will open an editor when it folds
those revisions together, offering you a chance to clean up the commit message::

 Add beta
 ***
 Add delta

Edit the commit message to your liking, then close the editor. The date used
for the commit will be the later of the two commits' dates. For this example,
let's assume that the commit message was changed to ``Add beta and delta.``
After histedit has run and had a chance to remove any old or temporary
revisions it needed, the history looks like this::

 @  2[tip]   989b4d060121   2009-04-27 18:04 -0500   durin42
 |    Add beta and delta.
 |
 o  1   081603921c3f   2009-04-27 18:04 -0500   durin42
 |    Add gamma
 |
 o  0   d8d2fcd0e319   2009-04-27 18:04 -0500   durin42
      Add alpha

Note that ``histedit`` does *not* remove any revisions (even its own temporary
ones) until after it has completed all the editing operations, so it will
probably perform several strip operations when it's done. For the above example,
it had to run strip twice. Strip can be slow depending on a variety of factors,
so you might need to be a little patient. You can choose to keep the original
revisions by passing the ``--keep`` flag.

The ``edit`` operation will drop you back to a command prompt,
allowing you to edit files freely, or even use ``hg record`` to commit
some changes as a separate commit. When you're done, any remaining
uncommitted changes will be committed as well. When done, run ``hg
histedit --continue`` to finish this step. If there are uncommitted
changes, you'll be prompted for a new commit message, but the default
commit message will be the original message for the ``edit`` ed
104 commit message will be the original message for the ``edit`` ed
105 revision, and the date of the original commit will be preserved.
105 revision, and the date of the original commit will be preserved.
106
106
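For example, a typical ``edit`` round-trip might look like this (the
changeset hash is illustrative)::

 $ hg histedit c561b4e977df   # then change 'pick' to 'edit' for a revision
 Editing (c561b4e977df), commit as needed now to split the change
 (to edit c561b4e977df, `hg histedit --continue` after making changes)
 $ hg histedit --continue
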
The ``message`` operation will give you a chance to revise a commit
message without changing the contents. It's a shortcut for doing
``edit`` immediately followed by ``hg histedit --continue``.

If ``histedit`` encounters a conflict when moving a revision (while
handling ``pick`` or ``fold``), it'll stop in a similar manner to
``edit`` with the difference that it won't prompt you for a commit
message when done. If you decide at this point that you don't like how
much work it will be to rearrange history, or that you made a mistake,
you can use ``hg histedit --abort`` to abandon the new changes you
have made and return to the state before you attempted to edit your
history.

If we clone the histedit-ed example repository above and add four more
changes, such that we have the following history::

 @  6[tip]   038383181893   2009-04-27 18:04 -0500   stefan
 |    Add theta
 |
 o  5   140988835471   2009-04-27 18:04 -0500   stefan
 |    Add eta
 |
 o  4   122930637314   2009-04-27 18:04 -0500   stefan
 |    Add zeta
 |
 o  3   836302820282   2009-04-27 18:04 -0500   stefan
 |    Add epsilon
 |
 o  2   989b4d060121   2009-04-27 18:04 -0500   durin42
 |    Add beta and delta.
 |
 o  1   081603921c3f   2009-04-27 18:04 -0500   durin42
 |    Add gamma
 |
 o  0   d8d2fcd0e319   2009-04-27 18:04 -0500   durin42
      Add alpha

If you run ``hg histedit --outgoing`` on the clone then it is the same
as running ``hg histedit 836302820282``. If you plan to push to a
repository that Mercurial does not detect to be related to the source
repo, you can add a ``--force`` option.

Config
------

Histedit rule lines are truncated to 80 characters by default. You
can customize this behavior by setting a different length in your
configuration file::

 [histedit]
 linelen = 120      # truncate rule lines at 120 characters

The summary of a change can be customized as well::

 [histedit]
 summary-template = '{rev} {bookmarks} {desc|firstline}'

The customized summary should be kept short enough that rule lines
will fit in the configured line length. See above if that requires
customization.
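
For instance, a template that also shows the short node hash might look
like this (any changeset template keyword accepted by ``hg log -T``
should work here)::

 [histedit]
 summary-template = '{rev}:{node|short} {desc|firstline}'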

``hg histedit`` attempts to automatically choose an appropriate base
revision to use. To change which base revision is used, define a
revset in your configuration file::

 [histedit]
 defaultrev = only(.) & draft()

By default, each edited revision needs to be present in the histedit
commands. To remove a revision, use the ``drop`` operation. You can
configure the drop to be implicit for missing commits by adding::

 [histedit]
 dropmissing = True

By default, histedit will close the transaction after each action. For
performance purposes, you can configure histedit to use a single transaction
across the entire histedit. WARNING: This setting introduces a significant risk
of losing the work you've done in a histedit if the histedit aborts
unexpectedly::

 [histedit]
 singletransaction = True

"""

from __future__ import absolute_import

# chistedit dependencies that are not available everywhere
try:
    import fcntl
    import termios
except ImportError:
    fcntl = None
    termios = None

import functools
import os
import struct

from mercurial.i18n import _
from mercurial.pycompat import (
    getattr,
    open,
)
from mercurial.node import (
    bin,
    hex,
    short,
)
from mercurial import (
    bundle2,
    cmdutil,
    context,
    copies,
    destutil,
    discovery,
    encoding,
    error,
    exchange,
    extensions,
    hg,
    logcmdutil,
    merge as mergemod,
    mergestate as mergestatemod,
    mergeutil,
    obsolete,
    pycompat,
    registrar,
    repair,
    rewriteutil,
    scmutil,
    state as statemod,
    util,
)
from mercurial.utils import (
    dateutil,
    stringutil,
    urlutil,
)

pickle = util.pickle
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)
configitem(
    b'experimental',
    b'histedit.autoverb',
    default=False,
)
configitem(
    b'histedit',
    b'defaultrev',
    default=None,
)
configitem(
    b'histedit',
    b'dropmissing',
    default=False,
)
configitem(
    b'histedit',
    b'linelen',
    default=80,
)
configitem(
    b'histedit',
    b'singletransaction',
    default=False,
)
configitem(
    b'ui',
    b'interface.histedit',
    default=None,
)
configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
# TODO: Teach the text-based histedit interface to respect this config option
# before we make it non-experimental.
configitem(
    b'histedit', b'later-commits-first', default=False, experimental=True
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

actiontable = {}
primaryactions = set()
secondaryactions = set()
tertiaryactions = set()
internalactions = set()


def geteditcomment(ui, first, last):
    """construct the editor comment
    The comment includes::
     - an intro
     - sorted primary commands
     - sorted short commands
     - sorted long commands
     - additional hints

    Commands are only included once.
    """
    intro = _(
        b"""Edit history between %s and %s

Commits are listed from least to most recent

You can reorder changesets by reordering the lines

Commands:
"""
    )
    actions = []

    def addverb(v):
        a = actiontable[v]
        lines = a.message.split(b"\n")
        if len(a.verbs):
            v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
        actions.append(b" %s = %s" % (v, lines[0]))
        actions.extend([b' %s'] * (len(lines) - 1))

    for v in (
        sorted(primaryactions)
        + sorted(secondaryactions)
        + sorted(tertiaryactions)
    ):
        addverb(v)
    actions.append(b'')

    hints = []
    if ui.configbool(b'histedit', b'dropmissing'):
        hints.append(
            b"Deleting a changeset from the list "
            b"will DISCARD it from the edited history!"
        )

    lines = (intro % (first, last)).split(b'\n') + actions + hints

    return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])


class histeditstate(object):
    def __init__(self, repo):
        self.repo = repo
        self.actions = None
        self.keep = None
        self.topmost = None
        self.parentctxnode = None
        self.lock = None
        self.wlock = None
        self.backupfile = None
        self.stateobj = statemod.cmdstate(repo, b'histedit-state')
        self.replacements = []

    def read(self):
        """Load histedit state from disk and set fields appropriately."""
        if not self.stateobj.exists():
            cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))

        data = self._read()

        self.parentctxnode = data[b'parentctxnode']
        actions = parserules(data[b'rules'], self)
        self.actions = actions
        self.keep = data[b'keep']
        self.topmost = data[b'topmost']
        self.replacements = data[b'replacements']
        self.backupfile = data[b'backupfile']

    def _read(self):
        fp = self.repo.vfs.read(b'histedit-state')
        if fp.startswith(b'v1\n'):
            data = self._load()
            parentctxnode, rules, keep, topmost, replacements, backupfile = data
        else:
            data = pickle.loads(fp)
            parentctxnode, rules, keep, topmost, replacements = data
            backupfile = None
        rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])

        return {
            b'parentctxnode': parentctxnode,
            b"rules": rules,
            b"keep": keep,
            b"topmost": topmost,
            b"replacements": replacements,
            b"backupfile": backupfile,
        }

    def write(self, tr=None):
        if tr:
            tr.addfilegenerator(
                b'histedit-state',
                (b'histedit-state',),
                self._write,
                location=b'plain',
            )
        else:
            with self.repo.vfs(b"histedit-state", b"w") as f:
                self._write(f)

    def _write(self, fp):
        fp.write(b'v1\n')
        fp.write(b'%s\n' % hex(self.parentctxnode))
        fp.write(b'%s\n' % hex(self.topmost))
        fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
        fp.write(b'%d\n' % len(self.actions))
        for action in self.actions:
            fp.write(b'%s\n' % action.tostate())
        fp.write(b'%d\n' % len(self.replacements))
        for replacement in self.replacements:
            fp.write(
                b'%s%s\n'
                % (
                    hex(replacement[0]),
                    b''.join(hex(r) for r in replacement[1]),
                )
            )
        backupfile = self.backupfile
        if not backupfile:
            backupfile = b''
        fp.write(b'%s\n' % backupfile)

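    # A sketch of the on-disk 'histedit-state' (v1) layout, as written by
    # _write() above and parsed by _load() below (placeholders, not literal
    # content):
    #
    #   v1
    #   <parentctxnode hex>
    #   <topmost hex>
    #   True|False                         (the 'keep' flag)
    #   <number of actions>
    #   <verb>\n<node hex>                 (two lines per action, see tostate())
    #   <number of replacements>
    #   <original hex><successor hex>...   (one line per replacement)
    #   <backup file name, or an empty line>
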
    def _load(self):
        fp = self.repo.vfs(b'histedit-state', b'r')
        lines = [l[:-1] for l in fp.readlines()]

        index = 0
        lines[index]  # version number
        index += 1

        parentctxnode = bin(lines[index])
        index += 1

        topmost = bin(lines[index])
        index += 1

        keep = lines[index] == b'True'
        index += 1

        # Rules
        rules = []
        rulelen = int(lines[index])
        index += 1
        for i in pycompat.xrange(rulelen):
            ruleaction = lines[index]
            index += 1
            rule = lines[index]
            index += 1
            rules.append((ruleaction, rule))

        # Replacements
        replacements = []
        replacementlen = int(lines[index])
        index += 1
        for i in pycompat.xrange(replacementlen):
            replacement = lines[index]
            original = bin(replacement[:40])
            succ = [
                bin(replacement[i : i + 40])
                for i in range(40, len(replacement), 40)
            ]
            replacements.append((original, succ))
            index += 1

        backupfile = lines[index]
        index += 1

        fp.close()

        return parentctxnode, rules, keep, topmost, replacements, backupfile

    def clear(self):
        if self.inprogress():
            self.repo.vfs.unlink(b'histedit-state')

    def inprogress(self):
        return self.repo.vfs.exists(b'histedit-state')


class histeditaction(object):
    def __init__(self, state, node):
        self.state = state
        self.repo = state.repo
        self.node = node

    @classmethod
    def fromrule(cls, state, rule):
        """Parses the given rule, returning an instance of the histeditaction."""
        ruleid = rule.strip().split(b' ', 1)[0]
        # ruleid can be a rev number, a hash prefix, a bookmark name, etc.
        # Validate the rule id and resolve it to a full node hash
        try:
            rev = bin(ruleid)
        except TypeError:
            try:
                _ctx = scmutil.revsingle(state.repo, ruleid)
                rulehash = _ctx.hex()
                rev = bin(rulehash)
            except error.RepoLookupError:
                raise error.ParseError(_(b"invalid changeset %s") % ruleid)
        return cls(state, rev)

    def verify(self, prev, expected, seen):
        """Verifies semantic correctness of the rule"""
        repo = self.repo
        ha = hex(self.node)
        self.node = scmutil.resolvehexnodeidprefix(repo, ha)
        if self.node is None:
            raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
        self._verifynodeconstraints(prev, expected, seen)

    def _verifynodeconstraints(self, prev, expected, seen):
        # by default a command needs a node in the edited list
        if self.node not in expected:
            raise error.ParseError(
                _(b'%s "%s" changeset was not a candidate')
                % (self.verb, short(self.node)),
                hint=_(b'only use listed changesets'),
            )
        # and only one command per node
        if self.node in seen:
            raise error.ParseError(
                _(b'duplicated command for changeset %s') % short(self.node)
            )

    def torule(self):
540 def torule(self):
540 def torule(self):
541 """build a histedit rule line for an action
541 """build a histedit rule line for an action
542
542
543 by default lines are in the form:
543 by default lines are in the form:
544 <hash> <rev> <summary>
544 <hash> <rev> <summary>
545 """
545 """
546 ctx = self.repo[self.node]
546 ctx = self.repo[self.node]
547 ui = self.repo.ui
547 ui = self.repo.ui
548 # We don't want color codes in the commit message template, so
548 # We don't want color codes in the commit message template, so
549 # disable the label() template function while we render it.
549 # disable the label() template function while we render it.
550 with ui.configoverride(
550 with ui.configoverride(
551 {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
551 {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
552 ):
552 ):
553 summary = cmdutil.rendertemplate(
553 summary = cmdutil.rendertemplate(
554 ctx, ui.config(b'histedit', b'summary-template')
554 ctx, ui.config(b'histedit', b'summary-template')
555 )
555 )
556 # Handle the fact that `''.splitlines() => []`
556 # Handle the fact that `''.splitlines() => []`
557 summary = summary.splitlines()[0] if summary else b''
557 summary = summary.splitlines()[0] if summary else b''
558 line = b'%s %s %s' % (self.verb, ctx, summary)
558 line = b'%s %s %s' % (self.verb, ctx, summary)
        # trim to 75 columns by default so it's not stupidly wide in my editor
        # (the remaining 5 columns are left for the verb)
        maxlen = self.repo.ui.configint(b'histedit', b'linelen')
        maxlen = max(maxlen, 22)  # avoid truncating hash
        return stringutil.ellipsis(line, maxlen)

    def tostate(self):
566 """Print an action in format used by histedit state files
566 """Print an action in format used by histedit state files
567 (the first line is a verb, the remainder is the second)
567 (the first line is a verb, the remainder is the second)
568 """
568 """
569 return b"%s\n%s" % (self.verb, hex(self.node))
569 return b"%s\n%s" % (self.verb, hex(self.node))
570
570
571 def run(self):
571 def run(self):
572 """Runs the action. The default behavior is simply apply the action's
572 """Runs the action. The default behavior is simply apply the action's
573 rulectx onto the current parentctx."""
573 rulectx onto the current parentctx."""
        self.applychange()
        self.continuedirty()
        return self.continueclean()

    def applychange(self):
        """Applies the changes from this action's rulectx onto the current
        parentctx, but does not commit them."""
        repo = self.repo
        rulectx = repo[self.node]
        with repo.ui.silent():
            hg.update(repo, self.state.parentctxnode, quietempty=True)
        stats = applychanges(repo.ui, repo, rulectx, {})
        repo.dirstate.setbranch(rulectx.branch())
        if stats.unresolvedcount:
            raise error.InterventionRequired(
                _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
                hint=_(b'hg histedit --continue to resume'),
            )

    def continuedirty(self):
        """Continues the action when changes have been applied to the working
        copy. The default behavior is to commit the dirty changes."""
        repo = self.repo
        rulectx = repo[self.node]

        editor = self.commiteditor()
        commit = commitfuncfor(repo, rulectx)
        if repo.ui.configbool(b'rewrite', b'update-timestamp'):
            date = dateutil.makedate()
        else:
            date = rulectx.date()
        commit(
            text=rulectx.description(),
            user=rulectx.user(),
            date=date,
            extra=rulectx.extra(),
            editor=editor,
        )

    def commiteditor(self):
        """The editor to be used to edit the commit message."""
        return False

    def continueclean(self):
        """Continues the action when the working copy is clean. The default
        behavior is to accept the current commit as the new version of the
        rulectx."""
        ctx = self.repo[b'.']
        if ctx.node() == self.state.parentctxnode:
            self.repo.ui.warn(
                _(b'%s: skipping changeset (no changes)\n') % short(self.node)
            )
            return ctx, [(self.node, tuple())]
        if ctx.node() == self.node:
            # Nothing changed
            return ctx, []
        return ctx, [(self.node, (ctx.node(),))]


def commitfuncfor(repo, src):
    """Build a commit function for the replacement of <src>

    This function ensures we apply the same treatment to all changesets.

    - Add a 'histedit_source' entry in extra.

    Note that fold has its own separate logic because its handling is a bit
    different and not easily factored out of the fold method.
642 """
642 """
643 phasemin = src.phase()
643 phasemin = src.phase()
644
644
645 def commitfunc(**kwargs):
645 def commitfunc(**kwargs):
646 overrides = {(b'phases', b'new-commit'): phasemin}
646 overrides = {(b'phases', b'new-commit'): phasemin}
647 with repo.ui.configoverride(overrides, b'histedit'):
647 with repo.ui.configoverride(overrides, b'histedit'):
648 extra = kwargs.get('extra', {}).copy()
648 extra = kwargs.get('extra', {}).copy()
649 extra[b'histedit_source'] = src.hex()
649 extra[b'histedit_source'] = src.hex()
650 kwargs['extra'] = extra
650 kwargs['extra'] = extra
651 return repo.commit(**kwargs)
651 return repo.commit(**kwargs)
652
652
653 return commitfunc
653 return commitfunc
654
654
655
655
656 def applychanges(ui, repo, ctx, opts):
656 def applychanges(ui, repo, ctx, opts):
657 """Merge changeset from ctx (only) in the current working directory"""
657 """Merge changeset from ctx (only) in the current working directory"""
658 if ctx.p1().node() == repo.dirstate.p1():
658 if ctx.p1().node() == repo.dirstate.p1():
        # edits are "in place": we do not need to perform any merge,
        # just apply the changes to the parent for editing
        with ui.silent():
            cmdutil.revert(ui, repo, ctx, all=True)
        stats = mergemod.updateresult(0, 0, 0, 0)
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig(
                b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
            )
            stats = mergemod.graft(
                repo,
                ctx,
                labels=[
                    b'already edited',
                    b'current change',
                    b'parent of current change',
                ],
            )
        finally:
            repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
    return stats


def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
677 """collapse the set of revisions from first to last as new one.
685 """collapse the set of revisions from first to last as new one.

    Expected commit options are:
    - message
    - date
    - username
    Commit message is edited in all cases.

    This function works in memory."""
    ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
    if not ctxs:
        return None
    for c in ctxs:
        if not c.mutable():
            raise error.ParseError(
                _(b"cannot fold into public change %s") % short(c.node())
            )
    base = firstctx.p1()

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, lastctx)

    # prune files which were reverted by the updates
    files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
    # commit version of these files as defined by head
    headmf = lastctx.manifest()

    def filectxfn(repo, ctx, path):
        if path in headmf:
            fctx = lastctx[path]
            flags = fctx.flags()
            mctx = context.memfilectx(
                repo,
                ctx,
                fctx.path(),
                fctx.data(),
                islink=b'l' in flags,
                isexec=b'x' in flags,
                copysource=copied.get(path),
            )
            return mctx
        return None

    if commitopts.get(b'message'):
        message = commitopts[b'message']
    else:
        message = firstctx.description()
    user = commitopts.get(b'user')
    date = commitopts.get(b'date')
    extra = commitopts.get(b'extra')

    parents = (firstctx.p1().node(), firstctx.p2().node())
    editor = None
    if not skipprompt:
        editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
    new = context.memctx(
        repo,
        parents=parents,
        text=message,
        files=files,
        filectxfn=filectxfn,
        user=user,
        date=date,
        extra=extra,
        editor=editor,
    )
    return repo.commitctx(new)


def _isdirtywc(repo):
    return repo[None].dirty(missing=True)


def abortdirty():
    raise error.StateError(
        _(b'working copy has pending changes'),
        hint=_(
            b'amend, commit, or revert them and run histedit '
            b'--continue, or abort with histedit --abort'
        ),
    )


def action(verbs, message, priority=False, internal=False):
    def wrap(cls):
        assert not priority or not internal
        verb = verbs[0]
        if priority:
            primaryactions.add(verb)
        elif internal:
            internalactions.add(verb)
        elif len(verbs) > 1:
            secondaryactions.add(verb)
        else:
            tertiaryactions.add(verb)

        cls.verb = verb
        cls.verbs = verbs
        cls.message = message
        for verb in verbs:
            actiontable[verb] = cls
        return cls

    return wrap


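# A hypothetical sketch (not part of histedit) of how a new action plugs in:
# subclass histeditaction and register its verbs with the @action decorator,
# exactly as the built-in actions below do.
#
#     @action([b'noop', b'n'], _(b'use commit unchanged'))
#     class noop(histeditaction):
#         pass

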
@action([b'pick', b'p'], _(b'use commit'), priority=True)
class pick(histeditaction):
    def run(self):
        rulectx = self.repo[self.node]
        if rulectx.p1().node() == self.state.parentctxnode:
            self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
            return rulectx, []

        return super(pick, self).run()


@action(
    [b'edit', b'e'],
    _(b'use commit, but allow edits before making new commit'),
    priority=True,
)
class edit(histeditaction):
    def run(self):
        repo = self.repo
        rulectx = repo[self.node]
        hg.update(repo, self.state.parentctxnode, quietempty=True)
        applychanges(repo.ui, repo, rulectx, {})
        hint = _(b'to edit %s, `hg histedit --continue` after making changes')
        raise error.InterventionRequired(
            _(b'Editing (%s), commit as needed now to split the change')
            % short(self.node),
            hint=hint % short(self.node),
        )

    def commiteditor(self):
        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')


@action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
class fold(histeditaction):
    def verify(self, prev, expected, seen):
        """Verifies semantic correctness of the fold rule"""
        super(fold, self).verify(prev, expected, seen)
        repo = self.repo
        if not prev:
            c = repo[self.node].p1()
        elif prev.verb not in (b'pick', b'base'):
            return
        else:
            c = repo[prev.node]
        if not c.mutable():
            raise error.ParseError(
                _(b"cannot fold into public change %s") % short(c.node())
            )

    def continuedirty(self):
        repo = self.repo
        rulectx = repo[self.node]

        commit = commitfuncfor(repo, rulectx)
        commit(
            text=b'fold-temp-revision %s' % short(self.node),
            user=rulectx.user(),
            date=rulectx.date(),
            extra=rulectx.extra(),
        )

    def continueclean(self):
        repo = self.repo
        ctx = repo[b'.']
        rulectx = repo[self.node]
        parentctxnode = self.state.parentctxnode
        if ctx.node() == parentctxnode:
            repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
            return ctx, [(self.node, (parentctxnode,))]

        parentctx = repo[parentctxnode]
        newcommits = {
            c.node()
            for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
        }
        if not newcommits:
            repo.ui.warn(
                _(
                    b'%s: cannot fold - working copy is not a '
                    b'descendant of previous commit %s\n'
                )
                % (short(self.node), short(parentctxnode))
            )
            return ctx, [(self.node, (ctx.node(),))]

        middlecommits = newcommits.copy()
        middlecommits.discard(ctx.node())

        return self.finishfold(
            repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
        )

    def skipprompt(self):
        """Returns true if the rule should skip the message editor.

        For example, 'fold' wants to show an editor, but 'rollup'
        doesn't want to.
        """
        return False

    def mergedescs(self):
        """Returns true if the rule should merge messages of multiple changes.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return True

    def firstdate(self):
        """Returns true if the rule should preserve the date of the first
        change.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return False

    def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
        mergemod.update(ctx.p1())
        ### prepare new commit data
        commitopts = {}
        commitopts[b'user'] = ctx.user()
        # commit message
        if not self.mergedescs():
            newmessage = ctx.description()
        else:
            newmessage = (
                b'\n***\n'.join(
                    [ctx.description()]
                    + [repo[r].description() for r in internalchanges]
                    + [oldctx.description()]
                )
                + b'\n'
            )
        commitopts[b'message'] = newmessage
        # date
        if self.firstdate():
            commitopts[b'date'] = ctx.date()
        else:
            commitopts[b'date'] = max(ctx.date(), oldctx.date())
        # if date is to be updated to current
        if ui.configbool(b'rewrite', b'update-timestamp'):
            commitopts[b'date'] = dateutil.makedate()

        extra = ctx.extra().copy()
        # histedit_source
        # note: ctx is likely a temporary commit but that's the best we can
        # do here. This is sufficient to solve issue3681 anyway.
        extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
        commitopts[b'extra'] = extra
        phasemin = max(ctx.phase(), oldctx.phase())
        overrides = {(b'phases', b'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, b'histedit'):
            n = collapse(
                repo,
                ctx,
                repo[newnode],
                commitopts,
                skipprompt=self.skipprompt(),
            )
        if n is None:
            return ctx, []
        mergemod.update(repo[n])
        replacements = [
            (oldctx.node(), (newnode,)),
            (ctx.node(), (n,)),
            (newnode, (n,)),
        ]
        for ich in internalchanges:
            replacements.append((ich, (n,)))
        return repo[n], replacements


@action(
    [b'base', b'b'],
    _(b'checkout changeset and apply further changesets from there'),
)
class base(histeditaction):
    def run(self):
        if self.repo[b'.'].node() != self.node:
            mergemod.clean_update(self.repo[self.node])
        return self.continueclean()

    def continuedirty(self):
        abortdirty()

    def continueclean(self):
        basectx = self.repo[b'.']
        return basectx, []

    def _verifynodeconstraints(self, prev, expected, seen):
        # base can only be used with a node not in the edited set
        if self.node in expected:
            msg = _(b'%s "%s" changeset was an edited list candidate')
            raise error.ParseError(
                msg % (self.verb, short(self.node)),
                hint=_(b'base must only use unlisted changesets'),
            )


@action(
    [b'_multifold'],
    _(
993 """fold subclass used for when multiple folds happen in a row
1001 """fold subclass used for when multiple folds happen in a row
994
1002
995 We only want to fire the editor for the folded message once when
1003 We only want to fire the editor for the folded message once when
996 (say) four changes are folded down into a single change. This is
1004 (say) four changes are folded down into a single change. This is
997 similar to rollup, but we should preserve both messages so that
1005 similar to rollup, but we should preserve both messages so that
998 when the last fold operation runs we can show the user all the
1006 when the last fold operation runs we can show the user all the
999 commit messages in their editor.
1007 commit messages in their editor.
1000 """
1008 """
1001 ),
1009 ),
1002 internal=True,
1010 internal=True,
1003 )
1011 )
1004 class _multifold(fold):
1012 class _multifold(fold):
1005 def skipprompt(self):
1013 def skipprompt(self):
1006 return True
1014 return True
1007
1015
1008
1016
1009 @action(
1017 @action(
1010 [b"roll", b"r"],
1018 [b"roll", b"r"],
1011 _(b"like fold, but discard this commit's description and date"),
1019 _(b"like fold, but discard this commit's description and date"),
1012 )
1020 )
1013 class rollup(fold):
1021 class rollup(fold):
1014 def mergedescs(self):
1022 def mergedescs(self):
1015 return False
1023 return False
1016
1024
1017 def skipprompt(self):
1025 def skipprompt(self):
1018 return True
1026 return True
1019
1027
1020 def firstdate(self):
1028 def firstdate(self):
1021 return True
1029 return True
1022
1030
1023
1031
1024 @action([b"drop", b"d"], _(b'remove commit from history'))
1032 @action([b"drop", b"d"], _(b'remove commit from history'))
1025 class drop(histeditaction):
1033 class drop(histeditaction):
1026 def run(self):
1034 def run(self):
1027 parentctx = self.repo[self.state.parentctxnode]
1035 parentctx = self.repo[self.state.parentctxnode]
1028 return parentctx, [(self.node, tuple())]
1036 return parentctx, [(self.node, tuple())]
1029
1037
1030
1038
1031 @action(
1039 @action(
1032 [b"mess", b"m"],
1040 [b"mess", b"m"],
1033 _(b'edit commit message without changing commit content'),
1041 _(b'edit commit message without changing commit content'),
1034 priority=True,
1042 priority=True,
1035 )
1043 )
1036 class message(histeditaction):
1044 class message(histeditaction):
1037 def commiteditor(self):
1045 def commiteditor(self):
1038 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1046 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1039
1047
1040
1048
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code"""
    if opts is None:
        opts = {}
    path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)
    dest = path.pushloc or path.loc

    ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.StateError(_(b'no outgoing ancestors'))
    roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
    if len(roots) > 1:
        msg = _(b'there are ambiguous outgoing revisions')
        hint = _(b"see 'hg help histedit' for more detail")
        raise error.StateError(msg, hint=hint)
    return repo[roots[0]].node()


# Curses Support
try:
    import curses
except ImportError:
    curses = None

KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
ACTION_LABELS = {
    b'fold': b'^fold',
    b'roll': b'^roll',
}

COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11

E_QUIT, E_HISTEDIT = 1, 2
E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3

KEYTABLE = {
    b'global': {
        b'h': b'next-action',
        b'KEY_RIGHT': b'next-action',
        b'l': b'prev-action',
        b'KEY_LEFT': b'prev-action',
        b'q': b'quit',
        b'c': b'histedit',
        b'C': b'histedit',
        b'v': b'showpatch',
        b'?': b'help',
    },
    MODE_RULES: {
        b'd': b'action-drop',
        b'e': b'action-edit',
        b'f': b'action-fold',
        b'm': b'action-mess',
        b'p': b'action-pick',
        b'r': b'action-roll',
        b' ': b'select',
        b'j': b'down',
        b'k': b'up',
        b'KEY_DOWN': b'down',
        b'KEY_UP': b'up',
        b'J': b'move-down',
        b'K': b'move-up',
        b'KEY_NPAGE': b'move-down',
        b'KEY_PPAGE': b'move-up',
        b'0': b'goto',  # Used for 0..9
    },
    MODE_PATCH: {
        b' ': b'page-down',
        b'KEY_NPAGE': b'page-down',
        b'KEY_PPAGE': b'page-up',
        b'j': b'line-down',
        b'k': b'line-up',
        b'KEY_DOWN': b'line-down',
        b'KEY_UP': b'line-up',
        b'J': b'down',
        b'K': b'up',
    },
    MODE_HELP: {},
}
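# When a key is looked up in event() below, the mode-specific table takes
# precedence over the b'global' one; e.g. b'j' moves by rule in MODE_RULES
# but scrolls by line in MODE_PATCH.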


def screen_size():
    # TIOCGWINSZ fills a 4-byte buffer with two shorts: (rows, columns).
    return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b'    '))


class histeditrule(object):
    def __init__(self, ui, ctx, pos, action=b'pick'):
        self.ui = ui
        self.ctx = ctx
        self.action = action
        self.origpos = pos
        self.pos = pos
        self.conflicts = []

    def __bytes__(self):
        # Example display of several histeditrules:
        #
        #  #10 pick   316392:06a16c25c053   add option to skip tests
        #  #11 ^roll  316393:71313c964cc5   <RED>oops a fixup commit</RED>
        #  #12 pick   316394:ab31f3973b0d   include mfbt for mozilla-config.h
        #  #13 ^fold  316395:14ce5803f4c3   fix warnings
        #
        # The carets point to the changeset being folded into ("roll this
        # changeset into the changeset above").
        return b'%s%s' % (self.prefix, self.desc)

    __str__ = encoding.strmethod(__bytes__)

    @property
    def prefix(self):
        # Some actions ('fold' and 'roll') combine a patch with a
        # previous one. Add a marker showing which patch they apply
        # to.
        action = ACTION_LABELS.get(self.action, self.action)

        h = self.ctx.hex()[0:12]
        r = self.ctx.rev()

        return b"#%s %s %d:%s " % (
            (b'%d' % self.origpos).ljust(2),
            action.ljust(6),
            r,
            h,
        )
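        # e.g. origpos 0, action b'pick', rev 123, and a hypothetical hash
        # render as b'#0  pick   123:abcdef012345 '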

    @util.propertycache
    def desc(self):
        summary = cmdutil.rendertemplate(
            self.ctx, self.ui.config(b'histedit', b'summary-template')
        )
        if summary:
            return summary
        # This is split off from the prefix property so that we can
        # separately make the description for 'roll' red (since it
        # will get discarded).
        return self.ctx.description().splitlines()[0].strip()

    def checkconflicts(self, other):
        if other.pos > self.pos and other.origpos <= self.origpos:
            if set(other.ctx.files()) & set(self.ctx.files()) != set():
                self.conflicts.append(other)
                return self.conflicts

        if other in self.conflicts:
            self.conflicts.remove(other)
        return self.conflicts


def makecommands(rules):
    """Returns a list of commands consumable by histedit --commands based on
    our list of rules"""
    commands = []
    for rule in rules:
        commands.append(b'%s %s\n' % (rule.action, rule.ctx))
    return commands
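# For illustration (hypothetical hashes): two picked rules serialize to
# [b'pick 5339bf82f0ca\n', b'pick 8ef592ce7cc4\n'], the same "action node"
# lines that `hg histedit --commands FILE` parses.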


def addln(win, y, x, line, color=None):
    """Add a line to the given window, padded with trailing whitespace to the
    window width so that the color appears on the whole line; negative y/x
    count from the bottom/right edge"""
    maxy, maxx = win.getmaxyx()
    length = maxx - 1 - x
    line = bytes(line).ljust(length)[:length]
    if y < 0:
        y = maxy + y
    if x < 0:
        x = maxx + x
    if color:
        win.addstr(y, x, line, color)
    else:
        win.addstr(y, x, line)


def _trunc_head(line, n):
    if len(line) <= n:
        return line
    return b'> ' + line[-(n - 2) :]


def _trunc_tail(line, n):
    if len(line) <= n:
        return line
    return line[: n - 2] + b' >'
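# For example: _trunc_head(b'abcdefgh', 6) -> b'> efgh' and
# _trunc_tail(b'abcdefgh', 6) -> b'abcd >' (both exactly 6 bytes).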


class _chistedit_state(object):
    def __init__(
        self,
        repo,
        rules,
        stdscr,
    ):
        self.repo = repo
        self.rules = rules
        self.stdscr = stdscr
        self.later_on_top = repo.ui.configbool(
            b'histedit', b'later-commits-first'
        )
        # The current item in display order, initialized to point to the top
        # of the screen.
        self.pos = 0
        self.selected = None
        self.mode = (MODE_INIT, MODE_INIT)
        self.page_height = None
        self.modes = {
            MODE_RULES: {
                b'line_offset': 0,
            },
            MODE_PATCH: {
                b'line_offset': 0,
            },
        }

    def render_commit(self, win):
        """Renders the commit window that shows the log of the currently
        selected commit"""
        rule = self.rules[self.display_pos_to_rule_pos(self.pos)]

        ctx = rule.ctx
        win.box()

        maxy, maxx = win.getmaxyx()
        length = maxx - 3

        line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
        win.addstr(1, 1, line[:length])

        line = b"user:      %s" % ctx.user()
        win.addstr(2, 1, line[:length])

        bms = self.repo.nodebookmarks(ctx.node())
        line = b"bookmark:  %s" % b' '.join(bms)
        win.addstr(3, 1, line[:length])

        line = b"summary:   %s" % (ctx.description().splitlines()[0])
        win.addstr(4, 1, line[:length])

        line = b"files:     "
        win.addstr(5, 1, line)
        fnx = 1 + len(line)
        fnmaxx = length - fnx + 1
        y = 5
        fnmaxn = maxy - (1 + y) - 1
        files = ctx.files()
        for i, line1 in enumerate(files):
            if len(files) > fnmaxn and i == fnmaxn - 1:
                win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
                y = y + 1
                break
            win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
            y = y + 1

        conflicts = rule.conflicts
        if len(conflicts) > 0:
            conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
            conflictstr = b"changed files overlap with %s" % conflictstr
        else:
            conflictstr = b'no overlap'

        win.addstr(y, 1, conflictstr[:length])
        win.noutrefresh()

    def helplines(self):
        if self.mode[0] == MODE_PATCH:
            help = b"""\
?: help, k/up: line up, j/down: line down, v: stop viewing patch
pgup: prev page, space/pgdn: next page, c: commit, q: abort
"""
        else:
            help = b"""\
?: help, k/up: move up, j/down: move down, space: select, v: view patch
d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
"""
        if self.later_on_top:
            help += b"Newer commits are shown above older commits.\n"
        else:
            help += b"Older commits are shown above newer commits.\n"
        return help.splitlines()

    def render_help(self, win):
        maxy, maxx = win.getmaxyx()
        for y, line in enumerate(self.helplines()):
            if y >= maxy:
                break
            addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
        win.noutrefresh()

    def layout(self):
        maxy, maxx = self.stdscr.getmaxyx()
        helplen = len(self.helplines())
        mainlen = maxy - helplen - 12
        if mainlen < 1:
            raise error.Abort(
                _(b"terminal dimensions %d by %d too small for curses histedit")
                % (maxy, maxx),
                hint=_(
                    b"enlarge your terminal or use --config ui.interface=text"
                ),
            )
        return {
            b'commit': (12, maxx),
            b'help': (helplen, maxx),
            b'main': (mainlen, maxx),
        }
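    # Sizing example (hypothetical terminal): with maxy == 40 and a 4-line
    # help pane, the main list gets 40 - 4 - 12 == 24 rows, while the commit
    # pane is fixed at 12 rows.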

    def display_pos_to_rule_pos(self, display_pos):
        """Converts a position in display order to rule order.

        The `display_pos` is the order from the top in display order, not
        considering which items are currently visible on the screen. Thus,
        `display_pos=0` is the item at the top (possibly after scrolling to
        the top).
        """
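        # Example: with 5 rules and later-commits-first set, display_pos 0
        # (the top row) maps to rule index 4, the newest commit.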
        if self.later_on_top:
            return len(self.rules) - 1 - display_pos
        else:
            return display_pos

    def render_rules(self, rulesscr):
        start = self.modes[MODE_RULES][b'line_offset']

        conflicts = [r.ctx for r in self.rules if r.conflicts]
        if len(conflicts) > 0:
            line = b"potential conflict in %s" % b','.join(
                map(pycompat.bytestr, conflicts)
            )
            addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))

        for display_pos in range(start, len(self.rules)):
            y = display_pos - start
            if y < 0 or y >= self.page_height:
                continue
            rule_pos = self.display_pos_to_rule_pos(display_pos)
            rule = self.rules[rule_pos]
            if len(rule.conflicts) > 0:
                rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
            else:
                rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)

            if display_pos == self.selected:
                rollcolor = COLOR_ROLL_SELECTED
                addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
            elif display_pos == self.pos:
                rollcolor = COLOR_ROLL_CURRENT
                addln(
                    rulesscr,
                    y,
                    2,
                    rule,
                    curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
                )
            else:
                rollcolor = COLOR_ROLL
                addln(rulesscr, y, 2, rule)

            if rule.action == b'roll':
                rulesscr.addstr(
                    y,
                    2 + len(rule.prefix),
                    rule.desc,
                    curses.color_pair(rollcolor),
                )

        rulesscr.noutrefresh()

    def render_string(self, win, output, diffcolors=False):
        maxy, maxx = win.getmaxyx()
        length = min(maxy - 1, len(output))
        for y in range(0, length):
            line = output[y]
            if diffcolors:
                # startswith() is used because indexing bytes on Python 3
                # yields an int, which would never compare equal to b'+'.
                if line.startswith(b'+'):
                    win.addstr(
                        y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
                    )
                elif line.startswith(b'-'):
                    win.addstr(
                        y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
                    )
                elif line.startswith(b'@@ '):
                    win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
                else:
                    win.addstr(y, 0, line)
            else:
                win.addstr(y, 0, line)
        win.noutrefresh()

    def render_patch(self, win):
        start = self.modes[MODE_PATCH][b'line_offset']
        content = self.modes[MODE_PATCH][b'patchcontents']
        self.render_string(win, content[start:], diffcolors=True)

    def event(self, ch):
        """Change state based on the current character input

        Given the current state and the latest character input from the
        user, update the state accordingly.
        """
        oldpos = self.pos

        if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
            return E_RESIZE

        lookup_ch = ch
        if ch is not None and b'0' <= ch <= b'9':
            lookup_ch = b'0'

        curmode, prevmode = self.mode
        action = KEYTABLE[curmode].get(
            lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
        )
        if action is None:
            return
        if action in (b'down', b'move-down'):
            newpos = min(oldpos + 1, len(self.rules) - 1)
            self.move_cursor(oldpos, newpos)
            if self.selected is not None or action == b'move-down':
                self.swap(oldpos, newpos)
        elif action in (b'up', b'move-up'):
            newpos = max(0, oldpos - 1)
            self.move_cursor(oldpos, newpos)
            if self.selected is not None or action == b'move-up':
                self.swap(oldpos, newpos)
        elif action == b'next-action':
            self.cycle_action(oldpos, next=True)
        elif action == b'prev-action':
            self.cycle_action(oldpos, next=False)
        elif action == b'select':
            self.selected = oldpos if self.selected is None else None
            self.make_selection(self.selected)
        elif action == b'goto' and int(ch) < len(self.rules) <= 10:
            newrule = next((r for r in self.rules if r.origpos == int(ch)))
            self.move_cursor(oldpos, newrule.pos)
            if self.selected is not None:
                self.swap(oldpos, newrule.pos)
        elif action.startswith(b'action-'):
            self.change_action(oldpos, action[7:])
        elif action == b'showpatch':
            self.change_mode(MODE_PATCH if curmode != MODE_PATCH else prevmode)
        elif action == b'help':
            self.change_mode(MODE_HELP if curmode != MODE_HELP else prevmode)
        elif action == b'quit':
            return E_QUIT
        elif action == b'histedit':
            return E_HISTEDIT
        elif action == b'page-down':
            return E_PAGEDOWN
        elif action == b'page-up':
            return E_PAGEUP
        elif action == b'line-down':
            return E_LINEDOWN
        elif action == b'line-up':
            return E_LINEUP

    def patch_contents(self):
        repo = self.repo
        rule = self.rules[self.display_pos_to_rule_pos(self.pos)]
        displayer = logcmdutil.changesetdisplayer(
            repo.ui,
            repo,
            {b"patch": True, b"template": b"status"},
            buffered=True,
        )
        overrides = {(b'ui', b'verbose'): True}
        with repo.ui.configoverride(overrides, source=b'histedit'):
            displayer.show(rule.ctx)
            displayer.close()
        return displayer.hunk[rule.ctx.rev()].splitlines()

    def move_cursor(self, oldpos, newpos):
        """Change the rule/changeset that the cursor is pointing to, regardless of
        current mode (you can switch between patches from the view patch window)."""
        self.pos = newpos

        mode, _ = self.mode
        if mode == MODE_RULES:
            # Scroll through the list by updating the view for MODE_RULES, so that
            # even if we are not currently viewing the rules, switching back will
            # result in the cursor's rule being visible.
            modestate = self.modes[MODE_RULES]
            if newpos < modestate[b'line_offset']:
                modestate[b'line_offset'] = newpos
            elif newpos > modestate[b'line_offset'] + self.page_height - 1:
                modestate[b'line_offset'] = newpos - self.page_height + 1

        # Reset the patch view region to the top of the new patch.
        self.modes[MODE_PATCH][b'line_offset'] = 0

    def change_mode(self, mode):
        curmode, _ = self.mode
        self.mode = (mode, curmode)
        if mode == MODE_PATCH:
            self.modes[MODE_PATCH][b'patchcontents'] = self.patch_contents()

    def make_selection(self, pos):
        self.selected = pos

    def swap(self, oldpos, newpos):
        """Swap two positions and calculate necessary conflicts in
        O(|newpos-oldpos|) time"""
        old_rule_pos = self.display_pos_to_rule_pos(oldpos)
        new_rule_pos = self.display_pos_to_rule_pos(newpos)

        rules = self.rules
        assert 0 <= old_rule_pos < len(rules) and 0 <= new_rule_pos < len(rules)

        rules[old_rule_pos], rules[new_rule_pos] = (
            rules[new_rule_pos],
            rules[old_rule_pos],
        )

        # TODO: swap should not know about histeditrule's internals
        rules[new_rule_pos].pos = new_rule_pos
        rules[old_rule_pos].pos = old_rule_pos

        start = min(old_rule_pos, new_rule_pos)
        end = max(old_rule_pos, new_rule_pos)
        for r in pycompat.xrange(start, end + 1):
            rules[new_rule_pos].checkconflicts(rules[r])
            rules[old_rule_pos].checkconflicts(rules[r])

        if self.selected:
            self.make_selection(newpos)

    def change_action(self, pos, action):
        """Change the action state on the given position to the new action"""
        assert 0 <= pos < len(self.rules)
        self.rules[pos].action = action

    def cycle_action(self, pos, next=False):
        """Changes the action state to the next or the previous action from
        the action list"""
        assert 0 <= pos < len(self.rules)
        current = self.rules[pos].action

        assert current in KEY_LIST

        index = KEY_LIST.index(current)
        if next:
            index += 1
        else:
            index -= 1
        self.change_action(pos, KEY_LIST[index % len(KEY_LIST)])
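        # e.g. cycling forward from b'roll', the last KEY_LIST entry, wraps
        # around to b'pick' thanks to the modulo.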

    def change_view(self, delta, unit):
        """Change the region of whatever is being viewed (a patch or the list of
        changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
        mode, _ = self.mode
        if mode != MODE_PATCH:
            return
        mode_state = self.modes[mode]
        num_lines = len(mode_state[b'patchcontents'])
        page_height = self.page_height
        unit = page_height if unit == b'page' else 1
        num_pages = 1 + (num_lines - 1) // page_height
        max_offset = (num_pages - 1) * page_height
        newline = mode_state[b'line_offset'] + delta * unit
        mode_state[b'line_offset'] = max(0, min(max_offset, newline))
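        # Worked example (hypothetical sizes): a 95-line patch with
        # page_height 20 gives num_pages == 1 + 94 // 20 == 5 and
        # max_offset == 80, so the offset is always clamped to [0, 80].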


def _chisteditmain(repo, rules, stdscr):
    try:
        curses.use_default_colors()
    except curses.error:
        pass

    # initialize color pattern
    curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
    curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
    curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
    curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
    curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
    curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
    curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
    curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
    curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
    curses.init_pair(
        COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
    )
    curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)

    # don't display the cursor
    try:
        curses.curs_set(0)
    except curses.error:
        pass

    def drawvertwin(size, y, x):
        win = curses.newwin(size[0], size[1], y, x)
        y += size[0]
        return win, y, x

    state = _chistedit_state(repo, rules, stdscr)

    # eventloop
    ch = None
    stdscr.clear()
    stdscr.refresh()
    while True:
        oldmode, unused = state.mode
        if oldmode == MODE_INIT:
            state.change_mode(MODE_RULES)
        e = state.event(ch)

        if e == E_QUIT:
            return False
        if e == E_HISTEDIT:
            return state.rules
        else:
            if e == E_RESIZE:
                size = screen_size()
                if size != stdscr.getmaxyx():
                    curses.resizeterm(*size)

            sizes = state.layout()
            curmode, unused = state.mode
            if curmode != oldmode:
                state.page_height = sizes[b'main'][0]
                # Adjust the view to fit the current screen size.
                state.move_cursor(state.pos, state.pos)

            # Pack the windows against the top, each pane spread across the
            # full width of the screen.
            y, x = (0, 0)
            helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
            mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
            commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)

            if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
                if e == E_PAGEDOWN:
                    state.change_view(+1, b'page')
                elif e == E_PAGEUP:
                    state.change_view(-1, b'page')
                elif e == E_LINEDOWN:
                    state.change_view(+1, b'line')
                elif e == E_LINEUP:
                    state.change_view(-1, b'line')

            # start rendering
            commitwin.erase()
            helpwin.erase()
            mainwin.erase()
            if curmode == MODE_PATCH:
                state.render_patch(mainwin)
            elif curmode == MODE_HELP:
                state.render_string(mainwin, __doc__.strip().splitlines())
            else:
                state.render_rules(mainwin)
                state.render_commit(commitwin)
            state.render_help(helpwin)
            curses.doupdate()
            # done rendering
            ch = encoding.strtolocal(stdscr.getkey())


def _chistedit(ui, repo, freeargs, opts):
    """interactively edit changeset history via a curses interface

    Provides an ncurses interface to histedit. Press ? in chistedit mode
    to see extensive help. Requires python-curses to be installed."""

    if curses is None:
        raise error.Abort(_(b"Python curses library required"))

    # disable color
    ui._colormode = None

    try:
        keep = opts.get(b'keep')
        revs = opts.get(b'rev', [])[:]
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        revs.extend(freeargs)
        if not revs:
            defaultrev = destutil.desthistedit(ui, repo)
            if defaultrev is not None:
                revs.append(defaultrev)
        if len(revs) != 1:
            raise error.InputError(
                _(b'histedit requires exactly one ancestor revision')
            )

        rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.InputError(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

        topmost = repo.dirstate.p1()
        revs = between(repo, root, topmost, keep)
        if not revs:
            raise error.InputError(
                _(b'%s is not an ancestor of working directory') % short(root)
            )

        rules = []
        for i, r in enumerate(revs):
            rules.append(histeditrule(ui, repo[r], i))
        with util.with_lc_ctype():
            rc = curses.wrapper(functools.partial(_chisteditmain, repo, rules))
        curses.echo()
        curses.endwin()
        if rc is False:
            ui.write(_(b"histedit aborted\n"))
            return 0
        if type(rc) is list:
            ui.status(_(b"performing changes\n"))
            rules = makecommands(rc)
            with repo.vfs(b'chistedit', b'w+') as fp:
                for r in rules:
                    fp.write(r)
                opts[b'commands'] = fp.name
            return _texthistedit(ui, repo, freeargs, opts)
    except KeyboardInterrupt:
        pass
    return -1


@command(
    b'histedit',
    [
        (
            b'',
            b'commands',
            b'',
            _(b'read history edits from the specified file'),
            _(b'FILE'),
        ),
        (b'c', b'continue', False, _(b'continue an edit already in progress')),
        (b'', b'edit-plan', False, _(b'edit remaining actions list')),
        (
            b'k',
            b'keep',
            False,
            _(b"don't strip old nodes after edit is complete"),
        ),
        (b'', b'abort', False, _(b'abort an edit in progress')),
        (b'o', b'outgoing', False, _(b'changesets not found in destination')),
        (
            b'f',
            b'force',
            False,
            _(b'force outgoing even for unrelated repositories'),
        ),
        (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
    ]
    + cmdutil.formatteropts,
    _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit a changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    - `base` to check out a changeset and apply further changesets from there

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or a similar revset specification
       instead of --outgoing to specify the edit target revision exactly in
       such an ambiguous situation. See :hg:`help revsets` for details about
       selecting revisions.

    .. container:: verbose

       Examples:

         - A number of changes have been made.
           Revision 3 is no longer needed.

           Start history editing from revision 3::

             hg histedit -r 3

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

           Additional information about the possible actions
           to take appears below the list of revisions.

           To remove revision 3 from the history,
           its action (at the beginning of the relevant line)
           is changed to 'drop'::

             drop 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

         - A number of changes have been made.
           Revisions 2 and 4 need to be swapped.

           Start history editing from revision 2::

             hg histedit -r 2

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 252a1af424ad 2 Blorb a morgwazzle
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog

           To swap revisions 2 and 4, their lines are swapped
           in the editor::

             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    opts = pycompat.byteskwargs(opts)

    # kludge: _chistedit only works for starting an edit, not aborting
    # or continuing, so fall back to regular _texthistedit for those
    # operations.
    if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
        return _chistedit(ui, repo, freeargs, opts)
    return _texthistedit(ui, repo, freeargs, opts)


def _texthistedit(ui, repo, freeargs, opts):
    state = histeditstate(repo)
    with repo.wlock() as wlock, repo.lock() as lock:
        state.wlock = wlock
        state.lock = lock
        _histedit(ui, repo, state, freeargs, opts)


goalcontinue = b'continue'
goalabort = b'abort'
goaleditplan = b'edit-plan'
goalnew = b'new'


def _getgoal(opts):
    if opts.get(b'continue'):
        return goalcontinue
    if opts.get(b'abort'):
        return goalabort
    if opts.get(b'edit_plan'):
        return goaleditplan
    return goalnew
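# e.g. `hg histedit --continue` maps to goalcontinue and a bare `hg histedit`
# to goalnew; _histedit() dispatches on this goal.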


def _readfile(ui, path):
    if path == b'-':
        with ui.timeblockedsection(b'histedit'):
            return ui.fin.read()
    else:
        with open(path, b'rb') as f:
            return f.read()
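# A path of b'-' lets `hg histedit --commands -` read the edit plan from
# standard input instead of a file.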


def _validateargs(ui, repo, freeargs, opts, goal, rules, revs):
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.StateError(_(b'source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get(b'outgoing')
    editplan = opts.get(b'edit_plan')
    abort = opts.get(b'abort')
    force = opts.get(b'force')
    if force and not outg:
        raise error.InputError(_(b'--force only allowed with --outgoing'))
    if goal == b'continue':
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --continue'))
    elif goal == b'abort':
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --abort'))
    elif goal == b'edit-plan':
        if any((outg, revs, freeargs)):
            raise error.InputError(
                _(b'only --commands argument allowed with --edit-plan')
            )
    else:
        if outg:
            if revs:
                raise error.InputError(
                    _(b'no revisions allowed with --outgoing')
                )
            if len(freeargs) > 1:
                raise error.InputError(
                    _(b'only one repo argument allowed with --outgoing')
                )
        else:
            revs.extend(freeargs)
            if len(revs) == 0:
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.InputError(
                    _(b'histedit requires exactly one ancestor revision')
                )

1994
2006
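# Top-level driver: validate arguments, warn about tags that would be lost,
# dispatch on the goal, then run the remaining actions and finish up.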
def _histedit(ui, repo, state, freeargs, opts):
    fm = ui.formatter(b'histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get(b'rev', [])
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    rules = opts.get(b'commands', b'')
    state.keep = opts.get(b'keep', False)

    _validateargs(ui, repo, freeargs, opts, goal, rules, revs)

    hastags = False
    if revs:
        revs = logcmdutil.revrange(repo, revs)
        ctxs = [repo[rev] for rev in revs]
        for ctx in ctxs:
            tags = [tag for tag in ctx.tags() if tag != b'tip']
            if not hastags:
                hastags = len(tags)
        if hastags:
            if ui.promptchoice(
                _(
                    b'warning: tags associated with the given'
                    b' changeset will be lost after histedit.\n'
                    b'do you want to continue (yN)? $$ &Yes $$ &No'
                ),
                default=1,
            ):
                raise error.CanceledError(_(b'histedit cancelled\n'))
    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state, nobackup=nobackup)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()


def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
        if action.verb == b'fold' and nextact and nextact.verb == b'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serialize.
    state.write()

    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool(b"histedit", b"singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction(b'histedit')
    progress = ui.makeprogress(
        _(b"editing"), unit=_(b'changes'), total=len(state.actions)
    )
    with progress, util.acceptintervention(tr):
        while state.actions:
            state.write(tr=tr)
            actobj = state.actions[0]
            progress.increment(item=actobj.torule())
            ui.debug(
                b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
            )
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

    state.write()


def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session"""
    mergemod.update(repo[state.parentctxnode])

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in pycompat.iteritems(mapping):
            if not succs:
                ui.debug(b'histedit: %s is dropped\n' % short(prec))
            else:
                ui.debug(
                    b'histedit: %s is replaced by %s\n'
                    % (short(prec), short(succs[0]))
                )
                if len(succs) > 1:
                    m = b'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        mapping = {}

    for n in tmpnodes:
        if n in repo:
            mapping[n] = ()

    # remove entries about unknown nodes
    has_node = repo.unfiltered().changelog.index.has_node
    mapping = {
        k: v
        for k, v in mapping.items()
        if has_node(k) and all(has_node(n) for n in v)
    }
    scmutil.cleanupnodes(repo, mapping, b'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd(
        {
            hf(oldn): fl([hf(n) for n in newn], name=b'node')
            for oldn, newn in pycompat.iteritems(mapping)
        },
        key=b"oldnode",
        value=b"newnodes",
    )
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin(b'undo')):
        os.unlink(repo.sjoin(b'undo'))
    if repo.vfs.exists(b'histedit-last-edit.txt'):
        repo.vfs.unlink(b'histedit-last-edit.txt')


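# Undo a session: restore the old commits from the backup bundle if needed,
# move the working copy back to the old topmost, and strip the temporary
# and leaf nodes created by the edit.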
def _aborthistedit(ui, repo, state, nobackup=False):
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))

        # Recover our old commits if necessary
        if not state.topmost in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction(b'histedit.abort') as tr:
                bundle2.applybundle(
                    repo,
                    gen,
                    tr,
                    source=b'histedit',
                    url=b'bundle:' + backupfile,
                )

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs(
            b'parents() and (%n or %ln::)',
            state.parentctxnode,
            leafs | tmpnodes,
        ):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
        cleanupnode(ui, repo, leafs, nobackup=nobackup)
    except Exception:
        if state.inprogress():
            ui.warn(
                _(
                    b'warning: encountered an exception during histedit '
                    b'--abort; the repository may not have been completely '
                    b'cleaned up\n'
                )
            )
        raise
    finally:
        state.clear()


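# Abort entry point registered with the generic unfinished-operation
# machinery in extsetup(); unlike _aborthistedit it takes the locks itself.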
def hgaborthistedit(ui, repo):
    state = histeditstate(repo)
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    with repo.wlock() as wlock, repo.lock() as lock:
        state.wlock = wlock
        state.lock = lock
        _aborthistedit(ui, repo, state, nobackup=nobackup)


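# Re-open the rule editor on the saved plan (or read one from --commands),
# verify the result, and write the edited actions back to the state file.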
def _edithisteditplan(ui, repo, state, rules):
    state.read()
    if not rules:
        comment = geteditcomment(
            ui, short(state.parentctxnode), short(state.topmost)
        )
        rules = ruleeditor(repo, ui, state.actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    ctxs = [repo[act.node] for act in state.actions if act.node]
    warnverifyactions(ui, repo, actions, state, ctxs)
    state.actions = actions
    state.write()


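# Start a fresh session: locate the root of the edited range, sniff for
# collisions with untracked files, collect the initial rules, and write a
# backup bundle so that --abort can always restore the old commits.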
def _newhistedit(ui, repo, state, revs, freeargs, opts):
    outg = opts.get(b'outgoing')
    rules = opts.get(b'commands', b'')
    force = opts.get(b'force')

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    topmost = repo.dirstate.p1()
    if outg:
        if freeargs:
            remote = freeargs[0]
        else:
            remote = None
        root = findoutgoing(ui, repo, remote, force, opts)
    else:
        rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.InputError(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

    revs = between(repo, root, topmost, state.keep)
    if not revs:
        raise error.InputError(
            _(b'%s is not an ancestor of working directory') % short(root)
        )

    ctxs = [repo[r] for r in revs]

    wctx = repo[None]
    # Please don't ask me why `ancestors` is this value. I figured it
    # out with print-debugging, not by actually understanding what the
    # merge code is doing. :(
    ancs = [repo[b'.']]
    # Sniff-test to make sure we won't collide with untracked files in
    # the working directory. If we don't do this, we can get a
    # collision after we've started histedit and backing out gets ugly
    # for everyone, especially the user.
    for c in [ctxs[0].p1()] + ctxs:
        try:
            mergemod.calculateupdates(
                repo,
                wctx,
                c,
                ancs,
                # These parameters were determined by print-debugging
                # what happens later on inside histedit.
                branchmerge=False,
                force=False,
                acceptremote=False,
                followcopies=False,
            )
        except error.Abort:
            raise error.StateError(
                _(
                    b"untracked files in working directory conflict with files in %s"
                )
                % c
            )

    if not rules:
        comment = geteditcomment(ui, short(root), short(topmost))
        actions = [pick(state, r) for r in revs]
        rules = ruleeditor(repo, ui, actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    warnverifyactions(ui, repo, actions, state, ctxs)

    parentctxnode = repo[root].p1().node()

    state.parentctxnode = parentctxnode
    state.actions = actions
    state.topmost = topmost
    state.replacements = []

    ui.log(
        b"histedit",
        b"%d actions to histedit\n",
        len(actions),
        histedit_num_actions=len(actions),
    )

    # Create a backup so we can always abort completely.
    backupfile = None
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        backupfile = repair.backupbundle(
            repo, [parentctxnode], [topmost], root, b'histedit'
        )
    state.backupfile = backupfile


def _getsummary(ctx):
    # a common pattern is to extract the summary but default to the empty
    # string
    summary = ctx.description() or b''
    if summary:
        summary = summary.splitlines()[0]
    return summary


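# Resume after an interruption: let the pending action finish with either a
# dirty or a clean working copy, then record its replacements.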
def bootstrapcontinue(ui, state, opts):
    repo = state.repo

    ms = mergestatemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)

    if state.actions:
        actobj = state.actions.pop(0)

        if _isdirtywc(repo):
            actobj.continuedirty()
            if _isdirtywc(repo):
                abortdirty()

        parentctx, replacements = actobj.continueclean()

        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacements)

    return state


def between(repo, old, new, keep):
    """select and validate the set of revisions to edit

    When keep is false, the specified set can't have children."""
    revs = repo.revs(b'%n::%n', old, new)
    if revs and not keep:
        rewriteutil.precheck(repo, revs, b'edit')
        if repo.revs(b'(%ld) and merge()', revs):
            raise error.StateError(
                _(b'cannot edit history that contains merges')
            )
    return pycompat.maplist(repo.changelog.node, revs)


def ruleeditor(repo, ui, actions, editcomment=b""):
    """open an editor to edit rules

    rules are in the format [ [act, ctx], ...] like in state.rules
    """
    if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
        newact = util.sortdict()
        for act in actions:
            ctx = repo[act.node]
            summary = _getsummary(ctx)
            fword = summary.split(b' ', 1)[0].lower()
            added = False

            # if it doesn't end with the special character '!' just skip this
            if fword.endswith(b'!'):
                fword = fword[:-1]
                if fword in primaryactions | secondaryactions | tertiaryactions:
                    act.verb = fword
                    # get the target summary
                    tsum = summary[len(fword) + 1 :].lstrip()
                    # safe but slow: reverse iterate over the actions so we
                    # don't clash on two commits having the same summary
                    for na, l in reversed(list(pycompat.iteritems(newact))):
                        actx = repo[na.node]
                        asum = _getsummary(actx)
                        if asum == tsum:
                            added = True
                            l.append(act)
                            break

            if not added:
                newact[act] = []

        # copy over and flatten the new list
        actions = []
        for na, l in pycompat.iteritems(newact):
            actions.append(na)
            actions += l

    rules = b'\n'.join([act.torule() for act in actions])
    rules += b'\n\n'
    rules += editcomment
    rules = ui.edit(
        rules,
        ui.username(),
        {b'prefix': b'histedit'},
        repopath=repo.path,
        action=b'histedit',
    )

    # Save edit rules in .hg/histedit-last-edit.txt in case
    # the user needs to ask for help after something
    # surprising happens.
    with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
        f.write(rules)

    return rules


def parserules(rules, state):
    """Read the histedit rules string and return list of action objects"""
    rules = [
        l
        for l in (r.strip() for r in rules.splitlines())
        if l and not l.startswith(b'#')
    ]
    actions = []
    for r in rules:
        if b' ' not in r:
            raise error.ParseError(_(b'malformed line "%s"') % r)
        verb, rest = r.split(b' ', 1)

        if verb not in actiontable:
            raise error.ParseError(_(b'unknown action "%s"') % verb)

        action = actiontable[verb].fromrule(state, rest)
        actions.append(action)
    return actions


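# Same as verifyactions, but on a parse failure point the user at the copy
# of the rules saved in .hg/histedit-last-edit.txt.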
def warnverifyactions(ui, repo, actions, state, ctxs):
    try:
        verifyactions(actions, state, ctxs)
    except error.ParseError:
        if repo.vfs.exists(b'histedit-last-edit.txt'):
            ui.warn(
                _(
                    b'warning: histedit rules saved '
                    b'to: .hg/histedit-last-edit.txt\n'
                )
            )
        raise


def verifyactions(actions, state, ctxs):
    """Verify that there exists exactly one action per given changeset and
    other constraints.

    Will abort if there are too many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.
    """
    expected = {c.node() for c in ctxs}
    seen = set()
    prev = None

    if actions and actions[0].verb in [b'roll', b'fold']:
        raise error.ParseError(
            _(b'first changeset cannot use verb "%s"') % actions[0].verb
        )

    for action in actions:
        action.verify(prev, expected, seen)
        prev = action
        if action.node is not None:
            seen.add(action.node)
    missing = sorted(expected - seen)  # sort to stabilize output

    if state.repo.ui.configbool(b'histedit', b'dropmissing'):
        if len(actions) == 0:
            raise error.ParseError(
                _(b'no rules provided'),
                hint=_(b'use strip extension to remove commits'),
            )

        drops = [drop(state, n) for n in missing]
        # put them at the beginning so they execute immediately and
        # don't show in the edit-plan in the future
        actions[:0] = drops
    elif missing:
        raise error.ParseError(
            _(b'missing rules for changeset %s') % short(missing[0]),
            hint=_(
                b'use "drop %s" to discard, see also: '
                b"'hg help -e histedit.config'"
            )
            % short(missing[0]),
        )


def adjustreplacementsfrommarkers(repo, oldreplacements):
    """Adjust replacements from obsolescence markers

    Replacements structure is originally generated based on
    histedit's state and does not account for changes that are
    not recorded there. This function fixes that by adding
    data read from obsolescence markers"""
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        return oldreplacements

    unfi = repo.unfiltered()
    get_rev = unfi.changelog.index.get_rev
    obsstore = repo.obsstore
    newreplacements = list(oldreplacements)
    oldsuccs = [r[1] for r in oldreplacements]
    # successors that have already been added to succstocheck once
    seensuccs = set().union(
        *oldsuccs
    )  # create a set from an iterable of tuples
    succstocheck = list(seensuccs)
    while succstocheck:
        n = succstocheck.pop()
        missing = get_rev(n) is None
        markers = obsstore.successors.get(n, ())
        if missing and not markers:
            # dead end, mark it as such
            newreplacements.append((n, ()))
        for marker in markers:
            nsuccs = marker[1]
            newreplacements.append((n, nsuccs))
            for nsucc in nsuccs:
                if nsucc not in seensuccs:
                    seensuccs.add(nsucc)
                    succstocheck.append(nsucc)

    return newreplacements


def processreplacement(state):
    """process the list of replacements to return

    1) the final mapping between original and created nodes
    2) the list of temporary nodes created by histedit
    3) the list of new commits created by histedit"""
    replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
    allsuccs = set()
    replaced = set()
    fullmapping = {}
    # initialize basic set
    # fullmapping records all operations recorded in replacement
    for rep in replacements:
        allsuccs.update(rep[1])
        replaced.add(rep[0])
        fullmapping.setdefault(rep[0], set()).update(rep[1])
    new = allsuccs - replaced
    tmpnodes = allsuccs & replaced
    # Reduce content of fullmapping into direct relations between original
    # nodes and the final nodes created during history editing
    # Dropped changesets are replaced by an empty list
    toproceed = set(fullmapping)
    final = {}
    while toproceed:
        for x in list(toproceed):
            succs = fullmapping[x]
            for s in list(succs):
                if s in toproceed:
                    # non final node with unknown closure
                    # We can't process this now
                    break
                elif s in final:
                    # non final node, replace with closure
                    succs.remove(s)
                    succs.update(final[s])
            else:
                final[x] = succs
                toproceed.remove(x)
    # remove tmpnodes from final mapping
    for n in tmpnodes:
        del final[n]
    # we expect all changes involved in final to exist in the repo
    # turn `final` into list (topologically sorted)
    get_rev = state.repo.changelog.index.get_rev
    for prec, succs in final.items():
        final[prec] = sorted(succs, key=get_rev)

    # computed topmost element (necessary for bookmark)
    if new:
        newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
    elif not final:
        # Nothing rewritten at all. we won't need `newtopmost`
        # It is the same as `oldtopmost` and `processreplacement` knows it
        newtopmost = None
    else:
        # everybody died. The newtopmost is the parent of the root.
        r = state.repo.changelog.rev
        newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()

    return final, tmpnodes, new, newtopmost


def movetopmostbookmarks(repo, oldtopmost, newtopmost):
    """Move bookmark from oldtopmost to newly created topmost

    This is arguably a feature and we may only want that for the active
    bookmark. But the behavior is kept compatible with the old version for now.
    """
    if not oldtopmost or not newtopmost:
        return
    oldbmarks = repo.nodebookmarks(oldtopmost)
    if oldbmarks:
        with repo.lock(), repo.transaction(b'histedit') as tr:
            marks = repo._bookmarks
            changes = []
            for name in oldbmarks:
                changes.append((name, newtopmost))
            marks.applychanges(repo, tr, changes)


def cleanupnode(ui, repo, nodes, nobackup=False):
    """strip a group of nodes from the repository

    The set of nodes to strip may contain unknown nodes."""
    with repo.lock():
        # do not let filtering get in the way of the cleanse
        # we should probably get rid of obsolescence markers created during
        # the histedit, but we currently do not have such information.
        repo = repo.unfiltered()
        # Find all nodes that need to be stripped
        # (we use %lr instead of %ln to silently ignore unknown items)
        has_node = repo.changelog.index.has_node
        nodes = sorted(n for n in nodes if has_node(n))
        roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
        if roots:
            backup = not nobackup
            repair.strip(ui, repo, roots, backup=backup)


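# Wrapper installed around repair.strip: refuse to strip changesets that an
# in-progress histedit session still refers to.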
def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
    if isinstance(nodelist, bytes):
        nodelist = [nodelist]
    state = histeditstate(repo)
    if state.inprogress():
        state.read()
        histedit_nodes = {
            action.node for action in state.actions if action.node
        }
        common_nodes = histedit_nodes & set(nodelist)
        if common_nodes:
            raise error.Abort(
                _(b"histedit in progress, can't strip %s")
                % b', '.join(short(x) for x in common_nodes)
            )
    return orig(ui, repo, nodelist, *args, **kwargs)


extensions.wrapfunction(repair, b'strip', stripwrapper)


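# 'hg summary' hook: report how many actions remain when a histedit has
# been interrupted.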
def summaryhook(ui, repo):
    state = histeditstate(repo)
    if not state.inprogress():
        return
    state.read()
    if state.actions:
        # i18n: column positioning for "hg summary"
        ui.write(
            _(b'hist: %s (histedit --continue)\n')
            % (
                ui.label(_(b'%d remaining'), b'histedit.remaining')
                % len(state.actions)
            )
        )


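# Extension setup: register the summary hook and describe the histedit
# state file to the generic unfinished-operation machinery.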
def extsetup(ui):
    cmdutil.summaryhooks.add(b'histedit', summaryhook)
    statemod.addunfinished(
        b'histedit',
        fname=b'histedit-state',
        allowcommit=True,
        continueflag=True,
        abortfunc=hgaborthistedit,
    )

@@ -1,889 +1,896 @@

1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns the less you
57 The more specific you are in your filename patterns the less you
58 lose speed in huge repositories.
58 lose speed in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
86 from __future__ import absolute_import
87
87
88 import os
88 import os
89 import re
89 import re
90 import weakref
90 import weakref
91
91
92 from mercurial.i18n import _
92 from mercurial.i18n import _
93 from mercurial.pycompat import getattr
93 from mercurial.pycompat import getattr
94 from mercurial.hgweb import webcommands
94 from mercurial.hgweb import webcommands
95
95
96 from mercurial import (
96 from mercurial import (
97 cmdutil,
97 cmdutil,
98 context,
98 context,
99 dispatch,
99 dispatch,
100 error,
100 error,
101 extensions,
101 extensions,
102 filelog,
102 filelog,
103 localrepo,
103 localrepo,
104 logcmdutil,
104 logcmdutil,
105 match,
105 match,
106 patch,
106 patch,
107 pathutil,
107 pathutil,
108 pycompat,
108 pycompat,
109 registrar,
109 registrar,
110 scmutil,
110 scmutil,
111 templatefilters,
111 templatefilters,
112 templateutil,
112 templateutil,
113 util,
113 util,
114 )
114 )
115 from mercurial.utils import (
115 from mercurial.utils import (
116 dateutil,
116 dateutil,
117 stringutil,
117 stringutil,
118 )
118 )
119 from mercurial.dirstateutils import timestamp
119
120
120 cmdtable = {}
121 cmdtable = {}
121 command = registrar.command(cmdtable)
122 command = registrar.command(cmdtable)
122 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
123 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
123 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
124 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
124 # be specifying the version(s) of Mercurial they are tested with, or
125 # be specifying the version(s) of Mercurial they are tested with, or
125 # leave the attribute unspecified.
126 # leave the attribute unspecified.
126 testedwith = b'ships-with-hg-core'
127 testedwith = b'ships-with-hg-core'
127
128
128 # hg commands that do not act on keywords
129 # hg commands that do not act on keywords
129 nokwcommands = (
130 nokwcommands = (
130 b'add addremove annotate bundle export grep incoming init log'
131 b'add addremove annotate bundle export grep incoming init log'
131 b' outgoing push tip verify convert email glog'
132 b' outgoing push tip verify convert email glog'
132 )
133 )
133
134
134 # webcommands that do not act on keywords
135 # webcommands that do not act on keywords
135 nokwwebcommands = b'annotate changeset rev filediff diff comparison'
136 nokwwebcommands = b'annotate changeset rev filediff diff comparison'
136
137
137 # hg commands that trigger expansion only when writing to working dir,
138 # hg commands that trigger expansion only when writing to working dir,
138 # not when reading filelog, and unexpand when reading from working dir
139 # not when reading filelog, and unexpand when reading from working dir
139 restricted = (
140 restricted = (
140 b'merge kwexpand kwshrink record qrecord resolve transplant'
141 b'merge kwexpand kwshrink record qrecord resolve transplant'
141 b' unshelve rebase graft backout histedit fetch'
142 b' unshelve rebase graft backout histedit fetch'
142 )
143 )
143
144
144 # names of extensions using dorecord
145 # names of extensions using dorecord
145 recordextensions = b'record'
146 recordextensions = b'record'
146
147
147 colortable = {
148 colortable = {
148 b'kwfiles.enabled': b'green bold',
149 b'kwfiles.enabled': b'green bold',
149 b'kwfiles.deleted': b'cyan bold underline',
150 b'kwfiles.deleted': b'cyan bold underline',
150 b'kwfiles.enabledunknown': b'green',
151 b'kwfiles.enabledunknown': b'green',
151 b'kwfiles.ignored': b'bold',
152 b'kwfiles.ignored': b'bold',
152 b'kwfiles.ignoredunknown': b'none',
153 b'kwfiles.ignoredunknown': b'none',
153 }
154 }
154
155
155 templatefilter = registrar.templatefilter()
156 templatefilter = registrar.templatefilter()
156
157
157 configtable = {}
158 configtable = {}
158 configitem = registrar.configitem(configtable)
159 configitem = registrar.configitem(configtable)
159
160
160 configitem(
161 configitem(
161 b'keywordset',
162 b'keywordset',
162 b'svn',
163 b'svn',
163 default=False,
164 default=False,
164 )
165 )
165 # date like in cvs' $Date
166 # date like in cvs' $Date
166 @templatefilter(b'utcdate', intype=templateutil.date)
167 @templatefilter(b'utcdate', intype=templateutil.date)
167 def utcdate(date):
168 def utcdate(date):
168 """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
169 """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
169 dateformat = b'%Y/%m/%d %H:%M:%S'
170 dateformat = b'%Y/%m/%d %H:%M:%S'
170 return dateutil.datestr((date[0], 0), dateformat)
171 return dateutil.datestr((date[0], 0), dateformat)
171
172
172
173
173 # date like in svn's $Date
174 # date like in svn's $Date
174 @templatefilter(b'svnisodate', intype=templateutil.date)
175 @templatefilter(b'svnisodate', intype=templateutil.date)
175 def svnisodate(date):
176 def svnisodate(date):
176 """Date. Returns a date in this format: "2009-08-18 13:00:13
177 """Date. Returns a date in this format: "2009-08-18 13:00:13
177 +0200 (Tue, 18 Aug 2009)".
178 +0200 (Tue, 18 Aug 2009)".
178 """
179 """
179 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
180 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
180
181
181
182
182 # date like in svn's $Id
183 # date like in svn's $Id
183 @templatefilter(b'svnutcdate', intype=templateutil.date)
184 @templatefilter(b'svnutcdate', intype=templateutil.date)
184 def svnutcdate(date):
185 def svnutcdate(date):
185 """Date. Returns a UTC-date in this format: "2009-08-18
186 """Date. Returns a UTC-date in this format: "2009-08-18
186 11:00:13Z".
187 11:00:13Z".
187 """
188 """
188 dateformat = b'%Y-%m-%d %H:%M:%SZ'
189 dateformat = b'%Y-%m-%d %H:%M:%SZ'
189 return dateutil.datestr((date[0], 0), dateformat)
190 return dateutil.datestr((date[0], 0), dateformat)
190
191
191
192
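# --- illustrative sketch (not part of the diff): the three filters above
# format a Mercurial date tuple (unixtime, tzoffset) in CVS/SVN styles. A
# plain-stdlib analogue, shown here only to document the output shapes:
import time

def utcdate_demo(date):  # date = (unixtime, tzoffset)
    return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))

def svnutcdate_demo(date):
    return time.strftime('%Y-%m-%d %H:%M:%SZ', time.gmtime(date[0]))

print(utcdate_demo((1250593213, 0)))     # 2009/08/18 11:00:13
print(svnutcdate_demo((1250593213, 0)))  # 2009-08-18 11:00:13Z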
192 # make keyword tools accessible
193 # make keyword tools accessible
193 kwtools = {b'hgcmd': b''}
194 kwtools = {b'hgcmd': b''}
194
195
195
196
196 def _defaultkwmaps(ui):
197 def _defaultkwmaps(ui):
197 '''Returns default keywordmaps according to keywordset configuration.'''
198 '''Returns default keywordmaps according to keywordset configuration.'''
198 templates = {
199 templates = {
199 b'Revision': b'{node|short}',
200 b'Revision': b'{node|short}',
200 b'Author': b'{author|user}',
201 b'Author': b'{author|user}',
201 }
202 }
202 kwsets = (
203 kwsets = (
203 {
204 {
204 b'Date': b'{date|utcdate}',
205 b'Date': b'{date|utcdate}',
205 b'RCSfile': b'{file|basename},v',
206 b'RCSfile': b'{file|basename},v',
206 b'RCSFile': b'{file|basename},v', # kept for backwards compatibility
207 b'RCSFile': b'{file|basename},v', # kept for backwards compatibility
207 # with hg-keyword
208 # with hg-keyword
208 b'Source': b'{root}/{file},v',
209 b'Source': b'{root}/{file},v',
209 b'Id': b'{file|basename},v {node|short} {date|utcdate} {author|user}',
210 b'Id': b'{file|basename},v {node|short} {date|utcdate} {author|user}',
210 b'Header': b'{root}/{file},v {node|short} {date|utcdate} {author|user}',
211 b'Header': b'{root}/{file},v {node|short} {date|utcdate} {author|user}',
211 },
212 },
212 {
213 {
213 b'Date': b'{date|svnisodate}',
214 b'Date': b'{date|svnisodate}',
214 b'Id': b'{file|basename},v {node|short} {date|svnutcdate} {author|user}',
215 b'Id': b'{file|basename},v {node|short} {date|svnutcdate} {author|user}',
215 b'LastChangedRevision': b'{node|short}',
216 b'LastChangedRevision': b'{node|short}',
216 b'LastChangedBy': b'{author|user}',
217 b'LastChangedBy': b'{author|user}',
217 b'LastChangedDate': b'{date|svnisodate}',
218 b'LastChangedDate': b'{date|svnisodate}',
218 },
219 },
219 )
220 )
220 templates.update(kwsets[ui.configbool(b'keywordset', b'svn')])
221 templates.update(kwsets[ui.configbool(b'keywordset', b'svn')])
221 return templates
222 return templates
222
223
223
224
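# --- sketch of the selection in _defaultkwmaps() above (abridged template
# maps; sample values are assumptions): ui.configbool() yields a bool, and
# indexing the two-tuple with it picks the CVS-style set (False -> 0) or the
# SVN-style set (True -> 1).
svn = False  # stand-in for ui.configbool(b'keywordset', b'svn')
templates = {b'Revision': b'{node|short}', b'Author': b'{author|user}'}
kwsets = (
    {b'Date': b'{date|utcdate}'},     # CVS-style, abridged
    {b'Date': b'{date|svnisodate}'},  # SVN-style, abridged
)
templates.update(kwsets[svn])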
224 def _shrinktext(text, subfunc):
225 def _shrinktext(text, subfunc):
225 """Helper for keyword expansion removal in text.
226 """Helper for keyword expansion removal in text.
226 Depending on subfunc, this also returns the number of substitutions."""
227 Depending on subfunc, this also returns the number of substitutions."""
227 return subfunc(br'$\1$', text)
228 return subfunc(br'$\1$', text)
228
229
229
230
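# --- sketch: _shrinktext() collapses "$Keyword: value $" back to "$Keyword$".
# Passing pattern.sub returns just the text, while pattern.subn additionally
# returns the substitution count (which overwrite() below relies on):
import re
rekwexp_demo = re.compile(br'\$(Id|Date): [^$\n\r]*? \$')
text = b'/* $Id: demo.c,v 1a2b3c $ */'
print(rekwexp_demo.sub(br'$\1$', text))   # b'/* $Id$ */'
print(rekwexp_demo.subn(br'$\1$', text))  # (b'/* $Id$ */', 1)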
230 def _preselect(wstatus, changed):
231 def _preselect(wstatus, changed):
231 """Retrieves modified and added files from a working directory state
232 """Retrieves modified and added files from a working directory state
232 and returns the subset of each contained in given changed files
233 and returns the subset of each contained in given changed files
233 retrieved from a change context."""
234 retrieved from a change context."""
234 modified = [f for f in wstatus.modified if f in changed]
235 modified = [f for f in wstatus.modified if f in changed]
235 added = [f for f in wstatus.added if f in changed]
236 added = [f for f in wstatus.added if f in changed]
236 return modified, added
237 return modified, added
237
238
238
239
239 class kwtemplater(object):
240 class kwtemplater(object):
240 """
241 """
241 Sets up keyword templates, corresponding keyword regex, and
242 Sets up keyword templates, corresponding keyword regex, and
242 provides keyword substitution functions.
243 provides keyword substitution functions.
243 """
244 """
244
245
245 def __init__(self, ui, repo, inc, exc):
246 def __init__(self, ui, repo, inc, exc):
246 self.ui = ui
247 self.ui = ui
247 self._repo = weakref.ref(repo)
248 self._repo = weakref.ref(repo)
248 self.match = match.match(repo.root, b'', [], inc, exc)
249 self.match = match.match(repo.root, b'', [], inc, exc)
249 self.restrict = kwtools[b'hgcmd'] in restricted.split()
250 self.restrict = kwtools[b'hgcmd'] in restricted.split()
250 self.postcommit = False
251 self.postcommit = False
251
252
252 kwmaps = self.ui.configitems(b'keywordmaps')
253 kwmaps = self.ui.configitems(b'keywordmaps')
253 if kwmaps: # override default templates
254 if kwmaps: # override default templates
254 self.templates = dict(kwmaps)
255 self.templates = dict(kwmaps)
255 else:
256 else:
256 self.templates = _defaultkwmaps(self.ui)
257 self.templates = _defaultkwmaps(self.ui)
257
258
258 @property
259 @property
259 def repo(self):
260 def repo(self):
260 return self._repo()
261 return self._repo()
261
262
262 @util.propertycache
263 @util.propertycache
263 def escape(self):
264 def escape(self):
264 '''Returns bar-separated and escaped keywords.'''
265 '''Returns bar-separated and escaped keywords.'''
265 return b'|'.join(map(stringutil.reescape, self.templates.keys()))
266 return b'|'.join(map(stringutil.reescape, self.templates.keys()))
266
267
267 @util.propertycache
268 @util.propertycache
268 def rekw(self):
269 def rekw(self):
269 '''Returns regex for unexpanded keywords.'''
270 '''Returns regex for unexpanded keywords.'''
270 return re.compile(br'\$(%s)\$' % self.escape)
271 return re.compile(br'\$(%s)\$' % self.escape)
271
272
272 @util.propertycache
273 @util.propertycache
273 def rekwexp(self):
274 def rekwexp(self):
274 '''Returns regex for expanded keywords.'''
275 '''Returns regex for expanded keywords.'''
275 return re.compile(br'\$(%s): [^$\n\r]*? \$' % self.escape)
276 return re.compile(br'\$(%s): [^$\n\r]*? \$' % self.escape)
276
277
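# --- sketch of the two patterns for a template map {b'Id', b'Header'}
# (re.escape stands in for stringutil.reescape): rekw matches unexpanded
# keywords, rekwexp matches expanded ones.
import re
escape_demo = b'|'.join(map(re.escape, [b'Id', b'Header']))
rekw_demo = re.compile(br'\$(%s)\$' % escape_demo)                  # $Id$
rekwexp_demo = re.compile(br'\$(%s): [^$\n\r]*? \$' % escape_demo)  # $Id: ... $
assert rekw_demo.search(b'$Id$')
assert rekwexp_demo.search(b'$Id: 1a2b3c demo $')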
277 def substitute(self, data, path, ctx, subfunc):
278 def substitute(self, data, path, ctx, subfunc):
278 '''Replaces keywords in data with expanded template.'''
279 '''Replaces keywords in data with expanded template.'''
279
280
280 def kwsub(mobj):
281 def kwsub(mobj):
281 kw = mobj.group(1)
282 kw = mobj.group(1)
282 ct = logcmdutil.maketemplater(
283 ct = logcmdutil.maketemplater(
283 self.ui, self.repo, self.templates[kw]
284 self.ui, self.repo, self.templates[kw]
284 )
285 )
285 self.ui.pushbuffer()
286 self.ui.pushbuffer()
286 ct.show(ctx, root=self.repo.root, file=path)
287 ct.show(ctx, root=self.repo.root, file=path)
287 ekw = templatefilters.firstline(self.ui.popbuffer())
288 ekw = templatefilters.firstline(self.ui.popbuffer())
288 return b'$%s: %s $' % (kw, ekw)
289 return b'$%s: %s $' % (kw, ekw)
289
290
290 return subfunc(kwsub, data)
291 return subfunc(kwsub, data)
291
292
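# --- sketch of the callback protocol used by substitute(): re.sub accepts a
# function that receives each match object and returns the replacement. A
# canned dict stands in for the rendered template output here:
import re
rendered = {b'Rev': b'1a2b3c'}  # stand-in for maketemplater()/show() output
rekw_demo = re.compile(br'\$(Rev)\$')

def kwsub_demo(mobj):
    kw = mobj.group(1)
    return b'$%s: %s $' % (kw, rendered[kw])

print(rekw_demo.sub(kwsub_demo, b'// $Rev$'))  # b'// $Rev: 1a2b3c $'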
292 def linkctx(self, path, fileid):
293 def linkctx(self, path, fileid):
293 '''Similar to filelog.linkrev, but returns a changectx.'''
294 '''Similar to filelog.linkrev, but returns a changectx.'''
294 return self.repo.filectx(path, fileid=fileid).changectx()
295 return self.repo.filectx(path, fileid=fileid).changectx()
295
296
296 def expand(self, path, node, data):
297 def expand(self, path, node, data):
297 '''Returns data with keywords expanded.'''
298 '''Returns data with keywords expanded.'''
298 if (
299 if (
299 not self.restrict
300 not self.restrict
300 and self.match(path)
301 and self.match(path)
301 and not stringutil.binary(data)
302 and not stringutil.binary(data)
302 ):
303 ):
303 ctx = self.linkctx(path, node)
304 ctx = self.linkctx(path, node)
304 return self.substitute(data, path, ctx, self.rekw.sub)
305 return self.substitute(data, path, ctx, self.rekw.sub)
305 return data
306 return data
306
307
307 def iskwfile(self, cand, ctx):
308 def iskwfile(self, cand, ctx):
308 """Returns subset of candidates which are configured for keyword
309 """Returns subset of candidates which are configured for keyword
309 expansion but are not symbolic links."""
310 expansion but are not symbolic links."""
310 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
311 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
311
312
312 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
313 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
313 '''Overwrites selected files expanding/shrinking keywords.'''
314 '''Overwrites selected files expanding/shrinking keywords.'''
314 if self.restrict or lookup or self.postcommit: # exclude kw_copy
315 if self.restrict or lookup or self.postcommit: # exclude kw_copy
315 candidates = self.iskwfile(candidates, ctx)
316 candidates = self.iskwfile(candidates, ctx)
316 if not candidates:
317 if not candidates:
317 return
318 return
318 kwcmd = self.restrict and lookup # kwexpand/kwshrink
319 kwcmd = self.restrict and lookup # kwexpand/kwshrink
319 if self.restrict or expand and lookup:
320 if self.restrict or expand and lookup:
320 mf = ctx.manifest()
321 mf = ctx.manifest()
321 if self.restrict or rekw:
322 if self.restrict or rekw:
322 re_kw = self.rekw
323 re_kw = self.rekw
323 else:
324 else:
324 re_kw = self.rekwexp
325 re_kw = self.rekwexp
325 if expand:
326 if expand:
326 msg = _(b'overwriting %s expanding keywords\n')
327 msg = _(b'overwriting %s expanding keywords\n')
327 else:
328 else:
328 msg = _(b'overwriting %s shrinking keywords\n')
329 msg = _(b'overwriting %s shrinking keywords\n')
330 wctx = self.repo[None]
329 for f in candidates:
331 for f in candidates:
330 if self.restrict:
332 if self.restrict:
331 data = self.repo.file(f).read(mf[f])
333 data = self.repo.file(f).read(mf[f])
332 else:
334 else:
333 data = self.repo.wread(f)
335 data = self.repo.wread(f)
334 if stringutil.binary(data):
336 if stringutil.binary(data):
335 continue
337 continue
336 if expand:
338 if expand:
337 parents = ctx.parents()
339 parents = ctx.parents()
338 if lookup:
340 if lookup:
339 ctx = self.linkctx(f, mf[f])
341 ctx = self.linkctx(f, mf[f])
340 elif self.restrict and len(parents) > 1:
342 elif self.restrict and len(parents) > 1:
341 # merge commit
343 # merge commit
342 # in case of conflict f is in modified state during
344 # in case of conflict f is in modified state during
343 # merge, even if f does not differ from f in parent
345 # merge, even if f does not differ from f in parent
344 for p in parents:
346 for p in parents:
345 if f in p and not p[f].cmp(ctx[f]):
347 if f in p and not p[f].cmp(ctx[f]):
346 ctx = p[f].changectx()
348 ctx = p[f].changectx()
347 break
349 break
348 data, found = self.substitute(data, f, ctx, re_kw.subn)
350 data, found = self.substitute(data, f, ctx, re_kw.subn)
349 elif self.restrict:
351 elif self.restrict:
350 found = re_kw.search(data)
352 found = re_kw.search(data)
351 else:
353 else:
352 data, found = _shrinktext(data, re_kw.subn)
354 data, found = _shrinktext(data, re_kw.subn)
353 if found:
355 if found:
354 self.ui.note(msg % f)
356 self.ui.note(msg % f)
355 fp = self.repo.wvfs(f, b"wb", atomictemp=True)
357 fp = self.repo.wvfs(f, b"wb", atomictemp=True)
356 fp.write(data)
358 fp.write(data)
357 fp.close()
359 fp.close()
358 if kwcmd:
360 if kwcmd:
359 self.repo.dirstate.set_clean(f)
361 s = wctx[f].lstat()
362 mode = s.st_mode
363 size = s.st_size
364 mtime = timestamp.mtime_of(s)
365 cache_data = (mode, size, mtime)
366 self.repo.dirstate.set_clean(f, cache_data)
360 elif self.postcommit:
367 elif self.postcommit:
361 self.repo.dirstate.update_file_p1(f, p1_tracked=True)
368 self.repo.dirstate.update_file_p1(f, p1_tracked=True)
362
369
363 def shrink(self, fname, text):
370 def shrink(self, fname, text):
364 '''Returns text with all keyword substitutions removed.'''
371 '''Returns text with all keyword substitutions removed.'''
365 if self.match(fname) and not stringutil.binary(text):
372 if self.match(fname) and not stringutil.binary(text):
366 return _shrinktext(text, self.rekwexp.sub)
373 return _shrinktext(text, self.rekwexp.sub)
367 return text
374 return text
368
375
369 def shrinklines(self, fname, lines):
376 def shrinklines(self, fname, lines):
370 '''Returns lines with keyword substitutions removed.'''
377 '''Returns lines with keyword substitutions removed.'''
371 if self.match(fname):
378 if self.match(fname):
372 text = b''.join(lines)
379 text = b''.join(lines)
373 if not stringutil.binary(text):
380 if not stringutil.binary(text):
374 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
381 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
375 return lines
382 return lines
376
383
377 def wread(self, fname, data):
384 def wread(self, fname, data):
378 """If in restricted mode returns data read from wdir with
385 """If in restricted mode returns data read from wdir with
379 keyword substitutions removed."""
386 keyword substitutions removed."""
380 if self.restrict:
387 if self.restrict:
381 return self.shrink(fname, data)
388 return self.shrink(fname, data)
382 return data
389 return data
383
390
384
391
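# --- sketch of the change this diff makes to overwrite(): after rewriting a
# file in place, the dirstate is marked clean *with* fresh stat data so the
# next status run can skip re-hashing it. os.lstat/st_mtime are rough
# stand-ins for wctx[f].lstat() and the Mercurial-specific
# timestamp.mtime_of():
import os
st = os.lstat(__file__)
cache_data = (st.st_mode, st.st_size, st.st_mtime)  # (mode, size, mtime)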
385 class kwfilelog(filelog.filelog):
392 class kwfilelog(filelog.filelog):
386 """
393 """
387 Subclass of filelog to hook into its read, add, cmp methods.
394 Subclass of filelog to hook into its read, add, cmp methods.
388 Keywords are "stored" unexpanded, and processed on reading.
395 Keywords are "stored" unexpanded, and processed on reading.
389 """
396 """
390
397
391 def __init__(self, opener, kwt, path):
398 def __init__(self, opener, kwt, path):
392 super(kwfilelog, self).__init__(opener, path)
399 super(kwfilelog, self).__init__(opener, path)
393 self.kwt = kwt
400 self.kwt = kwt
394 self.path = path
401 self.path = path
395
402
396 def read(self, node):
403 def read(self, node):
397 '''Expands keywords when reading filelog.'''
404 '''Expands keywords when reading filelog.'''
398 data = super(kwfilelog, self).read(node)
405 data = super(kwfilelog, self).read(node)
399 if self.renamed(node):
406 if self.renamed(node):
400 return data
407 return data
401 return self.kwt.expand(self.path, node, data)
408 return self.kwt.expand(self.path, node, data)
402
409
403 def add(self, text, meta, tr, link, p1=None, p2=None):
410 def add(self, text, meta, tr, link, p1=None, p2=None):
404 '''Removes keyword substitutions when adding to filelog.'''
411 '''Removes keyword substitutions when adding to filelog.'''
405 text = self.kwt.shrink(self.path, text)
412 text = self.kwt.shrink(self.path, text)
406 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
413 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
407
414
408 def cmp(self, node, text):
415 def cmp(self, node, text):
409 '''Removes keyword substitutions for comparison.'''
416 '''Removes keyword substitutions for comparison.'''
410 text = self.kwt.shrink(self.path, text)
417 text = self.kwt.shrink(self.path, text)
411 return super(kwfilelog, self).cmp(node, text)
418 return super(kwfilelog, self).cmp(node, text)
412
419
413
420
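# --- sketch of the kwfilelog invariant: keywords are shrunk on add() and
# expanded on read(), so the store only ever holds "$Id$" while checkouts
# may hold "$Id: ... $". A toy stand-in, not the Mercurial API:
import re
_exp_demo = re.compile(br'\$(Id): [^$\n\r]*? \$')

def shrink_demo(text):
    return _exp_demo.sub(br'$\1$', text)

def expand_demo(text):
    return text.replace(b'$Id$', b'$Id: 1a2b3c $')

class ToyFilelog(object):
    def __init__(self):
        self._store = {}
    def add(self, node, text):
        self._store[node] = shrink_demo(text)  # keywords never hit the store
    def read(self, node):
        return expand_demo(self._store[node])  # ...and reappear on read

log = ToyFilelog()
log.add(1, b'$Id: stale $')
print(log.read(1))  # b'$Id: 1a2b3c $'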
414 def _status(ui, repo, wctx, kwt, *pats, **opts):
421 def _status(ui, repo, wctx, kwt, *pats, **opts):
415 """Bails out if [keyword] configuration is not active.
422 """Bails out if [keyword] configuration is not active.
416 Returns status of working directory."""
423 Returns status of working directory."""
417 if kwt:
424 if kwt:
418 opts = pycompat.byteskwargs(opts)
425 opts = pycompat.byteskwargs(opts)
419 return repo.status(
426 return repo.status(
420 match=scmutil.match(wctx, pats, opts),
427 match=scmutil.match(wctx, pats, opts),
421 clean=True,
428 clean=True,
422 unknown=opts.get(b'unknown') or opts.get(b'all'),
429 unknown=opts.get(b'unknown') or opts.get(b'all'),
423 )
430 )
424 if ui.configitems(b'keyword'):
431 if ui.configitems(b'keyword'):
425 raise error.Abort(_(b'[keyword] patterns cannot match'))
432 raise error.Abort(_(b'[keyword] patterns cannot match'))
426 raise error.Abort(_(b'no [keyword] patterns configured'))
433 raise error.Abort(_(b'no [keyword] patterns configured'))
427
434
428
435
429 def _kwfwrite(ui, repo, expand, *pats, **opts):
436 def _kwfwrite(ui, repo, expand, *pats, **opts):
430 '''Selects files and passes them to kwtemplater.overwrite.'''
437 '''Selects files and passes them to kwtemplater.overwrite.'''
431 wctx = repo[None]
438 wctx = repo[None]
432 if len(wctx.parents()) > 1:
439 if len(wctx.parents()) > 1:
433 raise error.Abort(_(b'outstanding uncommitted merge'))
440 raise error.Abort(_(b'outstanding uncommitted merge'))
434 kwt = getattr(repo, '_keywordkwt', None)
441 kwt = getattr(repo, '_keywordkwt', None)
435 with repo.wlock():
442 with repo.wlock():
436 status = _status(ui, repo, wctx, kwt, *pats, **opts)
443 status = _status(ui, repo, wctx, kwt, *pats, **opts)
437 if status.modified or status.added or status.removed or status.deleted:
444 if status.modified or status.added or status.removed or status.deleted:
438 raise error.Abort(_(b'outstanding uncommitted changes'))
445 raise error.Abort(_(b'outstanding uncommitted changes'))
439 kwt.overwrite(wctx, status.clean, True, expand)
446 kwt.overwrite(wctx, status.clean, True, expand)
440
447
441
448
442 @command(
449 @command(
443 b'kwdemo',
450 b'kwdemo',
444 [
451 [
445 (b'd', b'default', None, _(b'show default keyword template maps')),
452 (b'd', b'default', None, _(b'show default keyword template maps')),
446 (b'f', b'rcfile', b'', _(b'read maps from rcfile'), _(b'FILE')),
453 (b'f', b'rcfile', b'', _(b'read maps from rcfile'), _(b'FILE')),
447 ],
454 ],
448 _(b'hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
455 _(b'hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
449 optionalrepo=True,
456 optionalrepo=True,
450 )
457 )
451 def demo(ui, repo, *args, **opts):
458 def demo(ui, repo, *args, **opts):
452 """print [keywordmaps] configuration and an expansion example
459 """print [keywordmaps] configuration and an expansion example
453
460
454 Show current, custom, or default keyword template maps and their
461 Show current, custom, or default keyword template maps and their
455 expansions.
462 expansions.
456
463
457 Extend the current configuration by specifying maps as arguments
464 Extend the current configuration by specifying maps as arguments
458 and using -f/--rcfile to source an external hgrc file.
465 and using -f/--rcfile to source an external hgrc file.
459
466
460 Use -d/--default to disable current configuration.
467 Use -d/--default to disable current configuration.
461
468
462 See :hg:`help templates` for information on templates and filters.
469 See :hg:`help templates` for information on templates and filters.
463 """
470 """
464
471
465 def demoitems(section, items):
472 def demoitems(section, items):
466 ui.write(b'[%s]\n' % section)
473 ui.write(b'[%s]\n' % section)
467 for k, v in sorted(items):
474 for k, v in sorted(items):
468 if isinstance(v, bool):
475 if isinstance(v, bool):
469 v = stringutil.pprint(v)
476 v = stringutil.pprint(v)
470 ui.write(b'%s = %s\n' % (k, v))
477 ui.write(b'%s = %s\n' % (k, v))
471
478
472 fn = b'demo.txt'
479 fn = b'demo.txt'
473 tmpdir = pycompat.mkdtemp(b'', b'kwdemo.')
480 tmpdir = pycompat.mkdtemp(b'', b'kwdemo.')
474 ui.note(_(b'creating temporary repository at %s\n') % tmpdir)
481 ui.note(_(b'creating temporary repository at %s\n') % tmpdir)
475 if repo is None:
482 if repo is None:
476 baseui = ui
483 baseui = ui
477 else:
484 else:
478 baseui = repo.baseui
485 baseui = repo.baseui
479 repo = localrepo.instance(baseui, tmpdir, create=True)
486 repo = localrepo.instance(baseui, tmpdir, create=True)
480 ui.setconfig(b'keyword', fn, b'', b'keyword')
487 ui.setconfig(b'keyword', fn, b'', b'keyword')
481 svn = ui.configbool(b'keywordset', b'svn')
488 svn = ui.configbool(b'keywordset', b'svn')
482 # explicitly set keywordset for demo output
489 # explicitly set keywordset for demo output
483 ui.setconfig(b'keywordset', b'svn', svn, b'keyword')
490 ui.setconfig(b'keywordset', b'svn', svn, b'keyword')
484
491
485 uikwmaps = ui.configitems(b'keywordmaps')
492 uikwmaps = ui.configitems(b'keywordmaps')
486 if args or opts.get('rcfile'):
493 if args or opts.get('rcfile'):
487 ui.status(_(b'\n\tconfiguration using custom keyword template maps\n'))
494 ui.status(_(b'\n\tconfiguration using custom keyword template maps\n'))
488 if uikwmaps:
495 if uikwmaps:
489 ui.status(_(b'\textending current template maps\n'))
496 ui.status(_(b'\textending current template maps\n'))
490 if opts.get('default') or not uikwmaps:
497 if opts.get('default') or not uikwmaps:
491 if svn:
498 if svn:
492 ui.status(_(b'\toverriding default svn keywordset\n'))
499 ui.status(_(b'\toverriding default svn keywordset\n'))
493 else:
500 else:
494 ui.status(_(b'\toverriding default cvs keywordset\n'))
501 ui.status(_(b'\toverriding default cvs keywordset\n'))
495 if opts.get('rcfile'):
502 if opts.get('rcfile'):
496 ui.readconfig(opts.get(b'rcfile'))
503 ui.readconfig(opts.get(b'rcfile'))
497 if args:
504 if args:
498 # simulate hgrc parsing
505 # simulate hgrc parsing
499 rcmaps = b'[keywordmaps]\n%s\n' % b'\n'.join(args)
506 rcmaps = b'[keywordmaps]\n%s\n' % b'\n'.join(args)
500 repo.vfs.write(b'hgrc', rcmaps)
507 repo.vfs.write(b'hgrc', rcmaps)
501 ui.readconfig(repo.vfs.join(b'hgrc'))
508 ui.readconfig(repo.vfs.join(b'hgrc'))
502 kwmaps = dict(ui.configitems(b'keywordmaps'))
509 kwmaps = dict(ui.configitems(b'keywordmaps'))
503 elif opts.get('default'):
510 elif opts.get('default'):
504 if svn:
511 if svn:
505 ui.status(_(b'\n\tconfiguration using default svn keywordset\n'))
512 ui.status(_(b'\n\tconfiguration using default svn keywordset\n'))
506 else:
513 else:
507 ui.status(_(b'\n\tconfiguration using default cvs keywordset\n'))
514 ui.status(_(b'\n\tconfiguration using default cvs keywordset\n'))
508 kwmaps = _defaultkwmaps(ui)
515 kwmaps = _defaultkwmaps(ui)
509 if uikwmaps:
516 if uikwmaps:
510 ui.status(_(b'\tdisabling current template maps\n'))
517 ui.status(_(b'\tdisabling current template maps\n'))
511 for k, v in pycompat.iteritems(kwmaps):
518 for k, v in pycompat.iteritems(kwmaps):
512 ui.setconfig(b'keywordmaps', k, v, b'keyword')
519 ui.setconfig(b'keywordmaps', k, v, b'keyword')
513 else:
520 else:
514 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
521 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
515 if uikwmaps:
522 if uikwmaps:
516 kwmaps = dict(uikwmaps)
523 kwmaps = dict(uikwmaps)
517 else:
524 else:
518 kwmaps = _defaultkwmaps(ui)
525 kwmaps = _defaultkwmaps(ui)
519
526
520 uisetup(ui)
527 uisetup(ui)
521 reposetup(ui, repo)
528 reposetup(ui, repo)
522 ui.writenoi18n(b'[extensions]\nkeyword =\n')
529 ui.writenoi18n(b'[extensions]\nkeyword =\n')
523 demoitems(b'keyword', ui.configitems(b'keyword'))
530 demoitems(b'keyword', ui.configitems(b'keyword'))
524 demoitems(b'keywordset', ui.configitems(b'keywordset'))
531 demoitems(b'keywordset', ui.configitems(b'keywordset'))
525 demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
532 demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
526 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
533 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
527 repo.wvfs.write(fn, keywords)
534 repo.wvfs.write(fn, keywords)
528 repo[None].add([fn])
535 repo[None].add([fn])
529 ui.note(_(b'\nkeywords written to %s:\n') % fn)
536 ui.note(_(b'\nkeywords written to %s:\n') % fn)
530 ui.note(keywords)
537 ui.note(keywords)
531 with repo.wlock():
538 with repo.wlock():
532 repo.dirstate.setbranch(b'demobranch')
539 repo.dirstate.setbranch(b'demobranch')
533 for name, cmd in ui.configitems(b'hooks'):
540 for name, cmd in ui.configitems(b'hooks'):
534 if name.split(b'.', 1)[0].find(b'commit') > -1:
541 if name.split(b'.', 1)[0].find(b'commit') > -1:
535 repo.ui.setconfig(b'hooks', name, b'', b'keyword')
542 repo.ui.setconfig(b'hooks', name, b'', b'keyword')
536 msg = _(b'hg keyword configuration and expansion example')
543 msg = _(b'hg keyword configuration and expansion example')
537 ui.note((b"hg ci -m '%s'\n" % msg))
544 ui.note((b"hg ci -m '%s'\n" % msg))
538 repo.commit(text=msg)
545 repo.commit(text=msg)
539 ui.status(_(b'\n\tkeywords expanded\n'))
546 ui.status(_(b'\n\tkeywords expanded\n'))
540 ui.write(repo.wread(fn))
547 ui.write(repo.wread(fn))
541 repo.wvfs.rmtree(repo.root)
548 repo.wvfs.rmtree(repo.root)
542
549
543
550
544 @command(
551 @command(
545 b'kwexpand',
552 b'kwexpand',
546 cmdutil.walkopts,
553 cmdutil.walkopts,
547 _(b'hg kwexpand [OPTION]... [FILE]...'),
554 _(b'hg kwexpand [OPTION]... [FILE]...'),
548 inferrepo=True,
555 inferrepo=True,
549 )
556 )
550 def expand(ui, repo, *pats, **opts):
557 def expand(ui, repo, *pats, **opts):
551 """expand keywords in the working directory
558 """expand keywords in the working directory
552
559
553 Run after (re)enabling keyword expansion.
560 Run after (re)enabling keyword expansion.
554
561
555 kwexpand refuses to run if the given files contain local changes.
562 kwexpand refuses to run if the given files contain local changes.
556 """
563 """
557 # 3rd argument sets expansion to True
564 # 3rd argument sets expansion to True
558 _kwfwrite(ui, repo, True, *pats, **opts)
565 _kwfwrite(ui, repo, True, *pats, **opts)
559
566
560
567
561 @command(
568 @command(
562 b'kwfiles',
569 b'kwfiles',
563 [
570 [
564 (b'A', b'all', None, _(b'show keyword status flags of all files')),
571 (b'A', b'all', None, _(b'show keyword status flags of all files')),
565 (b'i', b'ignore', None, _(b'show files excluded from expansion')),
572 (b'i', b'ignore', None, _(b'show files excluded from expansion')),
566 (b'u', b'unknown', None, _(b'only show unknown (not tracked) files')),
573 (b'u', b'unknown', None, _(b'only show unknown (not tracked) files')),
567 ]
574 ]
568 + cmdutil.walkopts,
575 + cmdutil.walkopts,
569 _(b'hg kwfiles [OPTION]... [FILE]...'),
576 _(b'hg kwfiles [OPTION]... [FILE]...'),
570 inferrepo=True,
577 inferrepo=True,
571 )
578 )
572 def files(ui, repo, *pats, **opts):
579 def files(ui, repo, *pats, **opts):
573 """show files configured for keyword expansion
580 """show files configured for keyword expansion
574
581
575 List which files in the working directory are matched by the
582 List which files in the working directory are matched by the
576 [keyword] configuration patterns.
583 [keyword] configuration patterns.
577
584
578 Useful to prevent inadvertent keyword expansion and to speed up
585 Useful to prevent inadvertent keyword expansion and to speed up
579 execution by including only files that are actual candidates for
586 execution by including only files that are actual candidates for
580 expansion.
587 expansion.
581
588
582 See :hg:`help keyword` on how to construct patterns both for
589 See :hg:`help keyword` on how to construct patterns both for
583 inclusion and exclusion of files.
590 inclusion and exclusion of files.
584
591
585 With -A/--all and -v/--verbose the codes used to show the status
592 With -A/--all and -v/--verbose the codes used to show the status
586 of files are::
593 of files are::
587
594
588 K = keyword expansion candidate
595 K = keyword expansion candidate
589 k = keyword expansion candidate (not tracked)
596 k = keyword expansion candidate (not tracked)
590 I = ignored
597 I = ignored
591 i = ignored (not tracked)
598 i = ignored (not tracked)
592 """
599 """
593 kwt = getattr(repo, '_keywordkwt', None)
600 kwt = getattr(repo, '_keywordkwt', None)
594 wctx = repo[None]
601 wctx = repo[None]
595 status = _status(ui, repo, wctx, kwt, *pats, **opts)
602 status = _status(ui, repo, wctx, kwt, *pats, **opts)
596 if pats:
603 if pats:
597 cwd = repo.getcwd()
604 cwd = repo.getcwd()
598 else:
605 else:
599 cwd = b''
606 cwd = b''
600 files = []
607 files = []
601 opts = pycompat.byteskwargs(opts)
608 opts = pycompat.byteskwargs(opts)
602 if not opts.get(b'unknown') or opts.get(b'all'):
609 if not opts.get(b'unknown') or opts.get(b'all'):
603 files = sorted(status.modified + status.added + status.clean)
610 files = sorted(status.modified + status.added + status.clean)
604 kwfiles = kwt.iskwfile(files, wctx)
611 kwfiles = kwt.iskwfile(files, wctx)
605 kwdeleted = kwt.iskwfile(status.deleted, wctx)
612 kwdeleted = kwt.iskwfile(status.deleted, wctx)
606 kwunknown = kwt.iskwfile(status.unknown, wctx)
613 kwunknown = kwt.iskwfile(status.unknown, wctx)
607 if not opts.get(b'ignore') or opts.get(b'all'):
614 if not opts.get(b'ignore') or opts.get(b'all'):
608 showfiles = kwfiles, kwdeleted, kwunknown
615 showfiles = kwfiles, kwdeleted, kwunknown
609 else:
616 else:
610 showfiles = [], [], []
617 showfiles = [], [], []
611 if opts.get(b'all') or opts.get(b'ignore'):
618 if opts.get(b'all') or opts.get(b'ignore'):
612 showfiles += (
619 showfiles += (
613 [f for f in files if f not in kwfiles],
620 [f for f in files if f not in kwfiles],
614 [f for f in status.unknown if f not in kwunknown],
621 [f for f in status.unknown if f not in kwunknown],
615 )
622 )
616 kwlabels = b'enabled deleted enabledunknown ignored ignoredunknown'.split()
623 kwlabels = b'enabled deleted enabledunknown ignored ignoredunknown'.split()
617 kwstates = zip(kwlabels, pycompat.bytestr(b'K!kIi'), showfiles)
624 kwstates = zip(kwlabels, pycompat.bytestr(b'K!kIi'), showfiles)
618 fm = ui.formatter(b'kwfiles', opts)
625 fm = ui.formatter(b'kwfiles', opts)
619 fmt = b'%.0s%s\n'
626 fmt = b'%.0s%s\n'
620 if opts.get(b'all') or ui.verbose:
627 if opts.get(b'all') or ui.verbose:
621 fmt = b'%s %s\n'
628 fmt = b'%s %s\n'
622 for kwstate, char, filenames in kwstates:
629 for kwstate, char, filenames in kwstates:
623 label = b'kwfiles.' + kwstate
630 label = b'kwfiles.' + kwstate
624 for f in filenames:
631 for f in filenames:
625 fm.startitem()
632 fm.startitem()
626 fm.data(kwstatus=char, path=f)
633 fm.data(kwstatus=char, path=f)
627 fm.plain(fmt % (char, repo.pathto(f, cwd)), label=label)
634 fm.plain(fmt % (char, repo.pathto(f, cwd)), label=label)
628 fm.end()
635 fm.end()
629
636
630
637
631 @command(
638 @command(
632 b'kwshrink',
639 b'kwshrink',
633 cmdutil.walkopts,
640 cmdutil.walkopts,
634 _(b'hg kwshrink [OPTION]... [FILE]...'),
641 _(b'hg kwshrink [OPTION]... [FILE]...'),
635 inferrepo=True,
642 inferrepo=True,
636 )
643 )
637 def shrink(ui, repo, *pats, **opts):
644 def shrink(ui, repo, *pats, **opts):
638 """revert expanded keywords in the working directory
645 """revert expanded keywords in the working directory
639
646
640 Must be run before changing/disabling active keywords.
647 Must be run before changing/disabling active keywords.
641
648
642 kwshrink refuses to run if the given files contain local changes.
649 kwshrink refuses to run if the given files contain local changes.
643 """
650 """
644 # 3rd argument sets expansion to False
651 # 3rd argument sets expansion to False
645 _kwfwrite(ui, repo, False, *pats, **opts)
652 _kwfwrite(ui, repo, False, *pats, **opts)
646
653
647
654
648 # monkeypatches
655 # monkeypatches
649
656
650
657
651 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
658 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
652 """Monkeypatch/wrap patch.patchfile.__init__ to avoid
659 """Monkeypatch/wrap patch.patchfile.__init__ to avoid
653 rejects or conflicts due to expanded keywords in working dir."""
660 rejects or conflicts due to expanded keywords in working dir."""
654 orig(self, ui, gp, backend, store, eolmode)
661 orig(self, ui, gp, backend, store, eolmode)
655 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
662 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
656 if kwt:
663 if kwt:
657 # shrink keywords read from working dir
664 # shrink keywords read from working dir
658 self.lines = kwt.shrinklines(self.fname, self.lines)
665 self.lines = kwt.shrinklines(self.fname, self.lines)
659
666
660
667
661 def kwdiff(orig, repo, *args, **kwargs):
668 def kwdiff(orig, repo, *args, **kwargs):
662 '''Monkeypatch patch.diff to avoid expansion.'''
669 '''Monkeypatch patch.diff to avoid expansion.'''
663 kwt = getattr(repo, '_keywordkwt', None)
670 kwt = getattr(repo, '_keywordkwt', None)
664 if kwt:
671 if kwt:
665 restrict = kwt.restrict
672 restrict = kwt.restrict
666 kwt.restrict = True
673 kwt.restrict = True
667 try:
674 try:
668 for chunk in orig(repo, *args, **kwargs):
675 for chunk in orig(repo, *args, **kwargs):
669 yield chunk
676 yield chunk
670 finally:
677 finally:
671 if kwt:
678 if kwt:
672 kwt.restrict = restrict
679 kwt.restrict = restrict
673
680
674
681
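# --- sketch of the wrapping shape shared by kwdiff and kwweb_skip: save
# state, yield through the original generator, restore in a finally clause.
# A generic stand-in (setup/restore are hypothetical hooks):
def wrapgen_demo(orig, setup, restore):
    def wrapper(*args, **kwargs):
        saved = setup()
        try:
            for chunk in orig(*args, **kwargs):
                yield chunk
        finally:
            restore(saved)
    return wrapper

g = wrapgen_demo(lambda: iter([1, 2]),
                 lambda: print('save'), lambda s: print('restore'))
for item in g():
    print(item)  # prints: save, 1, 2, then restore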
675 def kwweb_skip(orig, web):
682 def kwweb_skip(orig, web):
676 '''Wraps webcommands.x turning off keyword expansion.'''
683 '''Wraps webcommands.x turning off keyword expansion.'''
677 kwt = getattr(web.repo, '_keywordkwt', None)
684 kwt = getattr(web.repo, '_keywordkwt', None)
678 if kwt:
685 if kwt:
679 origmatch = kwt.match
686 origmatch = kwt.match
680 kwt.match = util.never
687 kwt.match = util.never
681 try:
688 try:
682 for chunk in orig(web):
689 for chunk in orig(web):
683 yield chunk
690 yield chunk
684 finally:
691 finally:
685 if kwt:
692 if kwt:
686 kwt.match = origmatch
693 kwt.match = origmatch
687
694
688
695
689 def kw_amend(orig, ui, repo, old, extra, pats, opts):
696 def kw_amend(orig, ui, repo, old, extra, pats, opts):
690 '''Wraps cmdutil.amend expanding keywords after amend.'''
697 '''Wraps cmdutil.amend expanding keywords after amend.'''
691 kwt = getattr(repo, '_keywordkwt', None)
698 kwt = getattr(repo, '_keywordkwt', None)
692 if kwt is None:
699 if kwt is None:
693 return orig(ui, repo, old, extra, pats, opts)
700 return orig(ui, repo, old, extra, pats, opts)
694 with repo.wlock(), repo.dirstate.parentchange():
701 with repo.wlock(), repo.dirstate.parentchange():
695 kwt.postcommit = True
702 kwt.postcommit = True
696 newid = orig(ui, repo, old, extra, pats, opts)
703 newid = orig(ui, repo, old, extra, pats, opts)
697 if newid != old.node():
704 if newid != old.node():
698 ctx = repo[newid]
705 ctx = repo[newid]
699 kwt.restrict = True
706 kwt.restrict = True
700 kwt.overwrite(ctx, ctx.files(), False, True)
707 kwt.overwrite(ctx, ctx.files(), False, True)
701 kwt.restrict = False
708 kwt.restrict = False
702 return newid
709 return newid
703
710
704
711
705 def kw_copy(orig, ui, repo, pats, opts, rename=False):
712 def kw_copy(orig, ui, repo, pats, opts, rename=False):
706 """Wraps cmdutil.copy so that copy/rename destinations do not
713 """Wraps cmdutil.copy so that copy/rename destinations do not
707 contain expanded keywords.
714 contain expanded keywords.
708 Note that the source of a regular file destination may also be a
715 Note that the source of a regular file destination may also be a
709 symlink:
716 symlink:
710 hg cp sym x -> x is symlink
717 hg cp sym x -> x is symlink
711 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
718 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
712 For the latter we have to follow the symlink to find out whether its
719 For the latter we have to follow the symlink to find out whether its
713 target is configured for expansion and we therefore must unexpand the
720 target is configured for expansion and we therefore must unexpand the
714 keywords in the destination."""
721 keywords in the destination."""
715 kwt = getattr(repo, '_keywordkwt', None)
722 kwt = getattr(repo, '_keywordkwt', None)
716 if kwt is None:
723 if kwt is None:
717 return orig(ui, repo, pats, opts, rename)
724 return orig(ui, repo, pats, opts, rename)
718 with repo.wlock():
725 with repo.wlock():
719 orig(ui, repo, pats, opts, rename)
726 orig(ui, repo, pats, opts, rename)
720 if opts.get(b'dry_run'):
727 if opts.get(b'dry_run'):
721 return
728 return
722 wctx = repo[None]
729 wctx = repo[None]
723 cwd = repo.getcwd()
730 cwd = repo.getcwd()
724
731
725 def haskwsource(dest):
732 def haskwsource(dest):
726 """Returns true if dest is a regular file and configured for
733 """Returns true if dest is a regular file and configured for
727 expansion or a symlink which points to a file configured for
734 expansion or a symlink which points to a file configured for
728 expansion."""
735 expansion."""
729 source = repo.dirstate.copied(dest)
736 source = repo.dirstate.copied(dest)
730 if b'l' in wctx.flags(source):
737 if b'l' in wctx.flags(source):
731 source = pathutil.canonpath(
738 source = pathutil.canonpath(
732 repo.root, cwd, os.path.realpath(source)
739 repo.root, cwd, os.path.realpath(source)
733 )
740 )
734 return kwt.match(source)
741 return kwt.match(source)
735
742
736 candidates = [
743 candidates = [
737 f
744 f
738 for f in repo.dirstate.copies()
745 for f in repo.dirstate.copies()
739 if b'l' not in wctx.flags(f) and haskwsource(f)
746 if b'l' not in wctx.flags(f) and haskwsource(f)
740 ]
747 ]
741 kwt.overwrite(wctx, candidates, False, False)
748 kwt.overwrite(wctx, candidates, False, False)
742
749
743
750
744 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
751 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
745 '''Wraps record.dorecord expanding keywords after recording.'''
752 '''Wraps record.dorecord expanding keywords after recording.'''
746 kwt = getattr(repo, '_keywordkwt', None)
753 kwt = getattr(repo, '_keywordkwt', None)
747 if kwt is None:
754 if kwt is None:
748 return orig(ui, repo, commitfunc, *pats, **opts)
755 return orig(ui, repo, commitfunc, *pats, **opts)
749 with repo.wlock():
756 with repo.wlock():
750 # record returns 0 even when nothing has changed
757 # record returns 0 even when nothing has changed
751 # therefore compare nodes before and after
758 # therefore compare nodes before and after
752 kwt.postcommit = True
759 kwt.postcommit = True
753 ctx = repo[b'.']
760 ctx = repo[b'.']
754 wstatus = ctx.status()
761 wstatus = ctx.status()
755 ret = orig(ui, repo, commitfunc, *pats, **opts)
762 ret = orig(ui, repo, commitfunc, *pats, **opts)
756 recctx = repo[b'.']
763 recctx = repo[b'.']
757 if ctx != recctx:
764 if ctx != recctx:
758 modified, added = _preselect(wstatus, recctx.files())
765 modified, added = _preselect(wstatus, recctx.files())
759 kwt.restrict = False
766 kwt.restrict = False
760 with repo.dirstate.parentchange():
767 with repo.dirstate.parentchange():
761 kwt.overwrite(recctx, modified, False, True)
768 kwt.overwrite(recctx, modified, False, True)
762 kwt.overwrite(recctx, added, False, True, True)
769 kwt.overwrite(recctx, added, False, True, True)
763 kwt.restrict = True
770 kwt.restrict = True
764 return ret
771 return ret
765
772
766
773
767 def kwfilectx_cmp(orig, self, fctx):
774 def kwfilectx_cmp(orig, self, fctx):
768 if fctx._customcmp:
775 if fctx._customcmp:
769 return fctx.cmp(self)
776 return fctx.cmp(self)
770 kwt = getattr(self._repo, '_keywordkwt', None)
777 kwt = getattr(self._repo, '_keywordkwt', None)
771 if kwt is None:
778 if kwt is None:
772 return orig(self, fctx)
779 return orig(self, fctx)
773 # keywords affect the data size, so comparing wdir and filelog sizes
780 # keywords affect the data size, so comparing wdir and filelog sizes
774 # does not make sense
781 # does not make sense
775 if (
782 if (
776 fctx._filenode is None
783 fctx._filenode is None
777 and (
784 and (
778 self._repo._encodefilterpats
785 self._repo._encodefilterpats
779 or kwt.match(fctx.path())
786 or kwt.match(fctx.path())
780 and b'l' not in fctx.flags()
787 and b'l' not in fctx.flags()
781 or self.size() - 4 == fctx.size()
788 or self.size() - 4 == fctx.size()
782 )
789 )
783 or self.size() == fctx.size()
790 or self.size() == fctx.size()
784 ):
791 ):
785 return self._filelog.cmp(self._filenode, fctx.data())
792 return self._filelog.cmp(self._filenode, fctx.data())
786 return True
793 return True
787
794
788
795
789 def uisetup(ui):
796 def uisetup(ui):
790 """Monkeypatches dispatch._parse to retrieve user command.
797 """Monkeypatches dispatch._parse to retrieve user command.
791 Overrides file method to return kwfilelog instead of filelog
798 Overrides file method to return kwfilelog instead of filelog
792 if file matches user configuration.
799 if file matches user configuration.
793 Wraps commit to overwrite configured files with updated
800 Wraps commit to overwrite configured files with updated
794 keyword substitutions.
801 keyword substitutions.
795 Monkeypatches patch and webcommands."""
802 Monkeypatches patch and webcommands."""
796
803
797 def kwdispatch_parse(orig, ui, args):
804 def kwdispatch_parse(orig, ui, args):
798 '''Monkeypatch dispatch._parse to obtain running hg command.'''
805 '''Monkeypatch dispatch._parse to obtain running hg command.'''
799 cmd, func, args, options, cmdoptions = orig(ui, args)
806 cmd, func, args, options, cmdoptions = orig(ui, args)
800 kwtools[b'hgcmd'] = cmd
807 kwtools[b'hgcmd'] = cmd
801 return cmd, func, args, options, cmdoptions
808 return cmd, func, args, options, cmdoptions
802
809
803 extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)
810 extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)
804
811
805 extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp)
812 extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp)
806 extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init)
813 extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init)
807 extensions.wrapfunction(patch, b'diff', kwdiff)
814 extensions.wrapfunction(patch, b'diff', kwdiff)
808 extensions.wrapfunction(cmdutil, b'amend', kw_amend)
815 extensions.wrapfunction(cmdutil, b'amend', kw_amend)
809 extensions.wrapfunction(cmdutil, b'copy', kw_copy)
816 extensions.wrapfunction(cmdutil, b'copy', kw_copy)
810 extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord)
817 extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord)
811 for c in nokwwebcommands.split():
818 for c in nokwwebcommands.split():
812 extensions.wrapfunction(webcommands, c, kwweb_skip)
819 extensions.wrapfunction(webcommands, c, kwweb_skip)
813
820
814
821
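# --- sketch previewing reposetup() below: [keyword] config entries are split
# into include/exclude pattern lists, with anything not marked 'ignore'
# treated as an include. The sample items are assumptions, not real config:
items = [(b'**.py', b''), (b'x*', b'ignore')]  # stand-in for ui.configitems
inc, exc = [], [b'.hg*']
for pat, opt in items:
    (inc if opt != b'ignore' else exc).append(pat)
print(inc, exc)  # [b'**.py'] [b'.hg*', b'x*']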
815 def reposetup(ui, repo):
822 def reposetup(ui, repo):
816 '''Sets up repo as kwrepo for keyword substitution.'''
823 '''Sets up repo as kwrepo for keyword substitution.'''
817
824
818 try:
825 try:
819 if (
826 if (
820 not repo.local()
827 not repo.local()
821 or kwtools[b'hgcmd'] in nokwcommands.split()
828 or kwtools[b'hgcmd'] in nokwcommands.split()
822 or b'.hg' in util.splitpath(repo.root)
829 or b'.hg' in util.splitpath(repo.root)
823 or repo._url.startswith(b'bundle:')
830 or repo._url.startswith(b'bundle:')
824 ):
831 ):
825 return
832 return
826 except AttributeError:
833 except AttributeError:
827 pass
834 pass
828
835
829 inc, exc = [], [b'.hg*']
836 inc, exc = [], [b'.hg*']
830 for pat, opt in ui.configitems(b'keyword'):
837 for pat, opt in ui.configitems(b'keyword'):
831 if opt != b'ignore':
838 if opt != b'ignore':
832 inc.append(pat)
839 inc.append(pat)
833 else:
840 else:
834 exc.append(pat)
841 exc.append(pat)
835 if not inc:
842 if not inc:
836 return
843 return
837
844
838 kwt = kwtemplater(ui, repo, inc, exc)
845 kwt = kwtemplater(ui, repo, inc, exc)
839
846
840 class kwrepo(repo.__class__):
847 class kwrepo(repo.__class__):
841 def file(self, f):
848 def file(self, f):
842 if f[0] == b'/':
849 if f[0] == b'/':
843 f = f[1:]
850 f = f[1:]
844 return kwfilelog(self.svfs, kwt, f)
851 return kwfilelog(self.svfs, kwt, f)
845
852
846 def wread(self, filename):
853 def wread(self, filename):
847 data = super(kwrepo, self).wread(filename)
854 data = super(kwrepo, self).wread(filename)
848 return kwt.wread(filename, data)
855 return kwt.wread(filename, data)
849
856
850 def commit(self, *args, **opts):
857 def commit(self, *args, **opts):
851 # use custom commitctx for user commands
858 # use custom commitctx for user commands
852 # other extensions can still wrap repo.commitctx directly
859 # other extensions can still wrap repo.commitctx directly
853 self.commitctx = self.kwcommitctx
860 self.commitctx = self.kwcommitctx
854 try:
861 try:
855 return super(kwrepo, self).commit(*args, **opts)
862 return super(kwrepo, self).commit(*args, **opts)
856 finally:
863 finally:
857 del self.commitctx
864 del self.commitctx
858
865
859 def kwcommitctx(self, ctx, error=False, origctx=None):
866 def kwcommitctx(self, ctx, error=False, origctx=None):
860 n = super(kwrepo, self).commitctx(ctx, error, origctx)
867 n = super(kwrepo, self).commitctx(ctx, error, origctx)
861 # no lock needed, only called from repo.commit() which already locks
868 # no lock needed, only called from repo.commit() which already locks
862 if not kwt.postcommit:
869 if not kwt.postcommit:
863 restrict = kwt.restrict
870 restrict = kwt.restrict
864 kwt.restrict = True
871 kwt.restrict = True
865 kwt.overwrite(
872 kwt.overwrite(
866 self[n], sorted(ctx.added() + ctx.modified()), False, True
873 self[n], sorted(ctx.added() + ctx.modified()), False, True
867 )
874 )
868 kwt.restrict = restrict
875 kwt.restrict = restrict
869 return n
876 return n
870
877
871 def rollback(self, dryrun=False, force=False):
878 def rollback(self, dryrun=False, force=False):
872 with self.wlock():
879 with self.wlock():
873 origrestrict = kwt.restrict
880 origrestrict = kwt.restrict
874 try:
881 try:
875 if not dryrun:
882 if not dryrun:
876 changed = self[b'.'].files()
883 changed = self[b'.'].files()
877 ret = super(kwrepo, self).rollback(dryrun, force)
884 ret = super(kwrepo, self).rollback(dryrun, force)
878 if not dryrun:
885 if not dryrun:
879 ctx = self[b'.']
886 ctx = self[b'.']
880 modified, added = _preselect(ctx.status(), changed)
887 modified, added = _preselect(ctx.status(), changed)
881 kwt.restrict = False
888 kwt.restrict = False
882 kwt.overwrite(ctx, modified, True, True)
889 kwt.overwrite(ctx, modified, True, True)
883 kwt.overwrite(ctx, added, True, False)
890 kwt.overwrite(ctx, added, True, False)
884 return ret
891 return ret
885 finally:
892 finally:
886 kwt.restrict = origrestrict
893 kwt.restrict = origrestrict
887
894
888 repo.__class__ = kwrepo
895 repo.__class__ = kwrepo
889 repo._keywordkwt = kwt
896 repo._keywordkwt = kwt
@@ -1,790 +1,798 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import contextlib
12 import contextlib
13 import copy
13 import copy
14 import os
14 import os
15 import stat
15 import stat
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import hex
18 from mercurial.node import hex
19 from mercurial.pycompat import open
19 from mercurial.pycompat import open
20
20
21 from mercurial import (
21 from mercurial import (
22 dirstate,
22 dirstate,
23 encoding,
23 encoding,
24 error,
24 error,
25 httpconnection,
25 httpconnection,
26 match as matchmod,
26 match as matchmod,
27 pycompat,
27 pycompat,
28 requirements,
28 requirements,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 vfs as vfsmod,
32 vfs as vfsmod,
33 )
33 )
34 from mercurial.utils import hashutil
34 from mercurial.utils import hashutil
35 from mercurial.dirstateutils import timestamp
35
36
36 shortname = b'.hglf'
37 shortname = b'.hglf'
37 shortnameslash = shortname + b'/'
38 shortnameslash = shortname + b'/'
38 longname = b'largefiles'
39 longname = b'largefiles'
39
40
40 # -- Private worker functions ------------------------------------------
41 # -- Private worker functions ------------------------------------------
41
42
42
43
43 @contextlib.contextmanager
44 @contextlib.contextmanager
44 def lfstatus(repo, value=True):
45 def lfstatus(repo, value=True):
45 oldvalue = getattr(repo, 'lfstatus', False)
46 oldvalue = getattr(repo, 'lfstatus', False)
46 repo.lfstatus = value
47 repo.lfstatus = value
47 try:
48 try:
48 yield
49 yield
49 finally:
50 finally:
50 repo.lfstatus = oldvalue
51 repo.lfstatus = oldvalue
51
52
52
53
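# --- usage sketch for lfstatus() above (FakeRepo is a stand-in for a real
# localrepo instance): the flag is flipped for the duration of the block and
# restored even on error, so status code inside sees largefiles, not standins.
import contextlib

@contextlib.contextmanager
def lfstatus_demo(repo, value=True):  # same shape as lfstatus() above
    old = getattr(repo, 'lfstatus', False)
    repo.lfstatus = value
    try:
        yield
    finally:
        repo.lfstatus = old

class FakeRepo(object):
    pass

repo_demo = FakeRepo()
with lfstatus_demo(repo_demo):
    assert repo_demo.lfstatus       # status calls here see largefiles
assert repo_demo.lfstatus is False  # restored afterwards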
53 def getminsize(ui, assumelfiles, opt, default=10):
54 def getminsize(ui, assumelfiles, opt, default=10):
54 lfsize = opt
55 lfsize = opt
55 if not lfsize and assumelfiles:
56 if not lfsize and assumelfiles:
56 lfsize = ui.config(longname, b'minsize', default=default)
57 lfsize = ui.config(longname, b'minsize', default=default)
57 if lfsize:
58 if lfsize:
58 try:
59 try:
59 lfsize = float(lfsize)
60 lfsize = float(lfsize)
60 except ValueError:
61 except ValueError:
61 raise error.Abort(
62 raise error.Abort(
62 _(b'largefiles: size must be a number (not %s)\n') % lfsize
63 _(b'largefiles: size must be a number (not %s)\n') % lfsize
63 )
64 )
64 if lfsize is None:
65 if lfsize is None:
65 raise error.Abort(_(b'minimum size for largefiles must be specified'))
66 raise error.Abort(_(b'minimum size for largefiles must be specified'))
66 return lfsize
67 return lfsize
67
68
68
69
69 def link(src, dest):
70 def link(src, dest):
70 """Try to create hardlink - if that fails, efficiently make a copy."""
71 """Try to create hardlink - if that fails, efficiently make a copy."""
71 util.makedirs(os.path.dirname(dest))
72 util.makedirs(os.path.dirname(dest))
72 try:
73 try:
73 util.oslink(src, dest)
74 util.oslink(src, dest)
74 except OSError:
75 except OSError:
75 # if hardlinks fail, fallback on atomic copy
76 # if hardlinks fail, fallback on atomic copy
76 with open(src, b'rb') as srcf, util.atomictempfile(dest) as dstf:
77 with open(src, b'rb') as srcf, util.atomictempfile(dest) as dstf:
77 for chunk in util.filechunkiter(srcf):
78 for chunk in util.filechunkiter(srcf):
78 dstf.write(chunk)
79 dstf.write(chunk)
79 os.chmod(dest, os.stat(src).st_mode)
80 os.chmod(dest, os.stat(src).st_mode)
80
81
81
82
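# --- stdlib sketch of the same strategy (os.link and shutil.copy2 are rough
# stand-ins for util.oslink and util.atomictempfile; note the real code
# copies atomically, which shutil.copy2 does not):
import os
import shutil

def link_demo(src, dest):
    os.makedirs(os.path.dirname(dest) or '.', exist_ok=True)
    try:
        os.link(src, dest)       # hardlink when the filesystem allows it
    except OSError:
        shutil.copy2(src, dest)  # otherwise fall back to a copy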
82 def usercachepath(ui, hash):
83 def usercachepath(ui, hash):
83 """Return the correct location in the "global" largefiles cache for a file
84 """Return the correct location in the "global" largefiles cache for a file
84 with the given hash.
85 with the given hash.
85 This cache is used for sharing of largefiles across repositories - both
86 This cache is used for sharing of largefiles across repositories - both
86 to preserve download bandwidth and storage space."""
87 to preserve download bandwidth and storage space."""
87 return os.path.join(_usercachedir(ui), hash)
88 return os.path.join(_usercachedir(ui), hash)
88
89
89
90
90 def _usercachedir(ui, name=longname):
91 def _usercachedir(ui, name=longname):
91 '''Return the location of the "global" largefiles cache.'''
92 '''Return the location of the "global" largefiles cache.'''
92 path = ui.configpath(name, b'usercache')
93 path = ui.configpath(name, b'usercache')
93 if path:
94 if path:
94 return path
95 return path
95
96
96 hint = None
97 hint = None
97
98
98 if pycompat.iswindows:
99 if pycompat.iswindows:
99 appdata = encoding.environ.get(
100 appdata = encoding.environ.get(
100 b'LOCALAPPDATA', encoding.environ.get(b'APPDATA')
101 b'LOCALAPPDATA', encoding.environ.get(b'APPDATA')
101 )
102 )
102 if appdata:
103 if appdata:
103 return os.path.join(appdata, name)
104 return os.path.join(appdata, name)
104
105
105 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
106 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
106 b"LOCALAPPDATA",
107 b"LOCALAPPDATA",
107 b"APPDATA",
108 b"APPDATA",
108 name,
109 name,
109 )
110 )
110 elif pycompat.isdarwin:
111 elif pycompat.isdarwin:
111 home = encoding.environ.get(b'HOME')
112 home = encoding.environ.get(b'HOME')
112 if home:
113 if home:
113 return os.path.join(home, b'Library', b'Caches', name)
114 return os.path.join(home, b'Library', b'Caches', name)
114
115
115 hint = _(b"define %s in the environment, or set %s.usercache") % (
116 hint = _(b"define %s in the environment, or set %s.usercache") % (
116 b"HOME",
117 b"HOME",
117 name,
118 name,
118 )
119 )
119 elif pycompat.isposix:
120 elif pycompat.isposix:
120 path = encoding.environ.get(b'XDG_CACHE_HOME')
121 path = encoding.environ.get(b'XDG_CACHE_HOME')
121 if path:
122 if path:
122 return os.path.join(path, name)
123 return os.path.join(path, name)
123 home = encoding.environ.get(b'HOME')
124 home = encoding.environ.get(b'HOME')
124 if home:
125 if home:
125 return os.path.join(home, b'.cache', name)
126 return os.path.join(home, b'.cache', name)
126
127
127 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
128 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
128 b"XDG_CACHE_HOME",
129 b"XDG_CACHE_HOME",
129 b"HOME",
130 b"HOME",
130 name,
131 name,
131 )
132 )
132 else:
133 else:
133 raise error.Abort(
134 raise error.Abort(
134 _(b'unknown operating system: %s\n') % pycompat.osname
135 _(b'unknown operating system: %s\n') % pycompat.osname
135 )
136 )
136
137
137 raise error.Abort(_(b'unknown %s usercache location') % name, hint=hint)
138 raise error.Abort(_(b'unknown %s usercache location') % name, hint=hint)
138
139
139
140
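# --- summary sketch of the lookup order above (the env var names are real;
# 'largefiles' is the default cache name), with the POSIX branch as stdlib:
#   Windows: %LOCALAPPDATA%\largefiles, else %APPDATA%\largefiles
#   macOS:   $HOME/Library/Caches/largefiles
#   POSIX:   $XDG_CACHE_HOME/largefiles, else $HOME/.cache/largefiles
import os

def posix_usercache_demo(name='largefiles'):
    xdg = os.environ.get('XDG_CACHE_HOME')
    if xdg:
        return os.path.join(xdg, name)
    return os.path.join(os.environ.get('HOME', ''), '.cache', name)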
140 def inusercache(ui, hash):
141 def inusercache(ui, hash):
141 path = usercachepath(ui, hash)
142 path = usercachepath(ui, hash)
142 return os.path.exists(path)
143 return os.path.exists(path)
143
144
144
145
145 def findfile(repo, hash):
146 def findfile(repo, hash):
146 """Return store path of the largefile with the specified hash.
147 """Return store path of the largefile with the specified hash.
147 As a side effect, the file might be linked from user cache.
148 As a side effect, the file might be linked from user cache.
148 Return None if the file can't be found locally."""
149 Return None if the file can't be found locally."""
149 path, exists = findstorepath(repo, hash)
150 path, exists = findstorepath(repo, hash)
150 if exists:
151 if exists:
151 repo.ui.note(_(b'found %s in store\n') % hash)
152 repo.ui.note(_(b'found %s in store\n') % hash)
152 return path
153 return path
153 elif inusercache(repo.ui, hash):
154 elif inusercache(repo.ui, hash):
154 repo.ui.note(_(b'found %s in system cache\n') % hash)
155 repo.ui.note(_(b'found %s in system cache\n') % hash)
155 path = storepath(repo, hash)
156 path = storepath(repo, hash)
156 link(usercachepath(repo.ui, hash), path)
157 link(usercachepath(repo.ui, hash), path)
157 return path
158 return path
158 return None
159 return None
159
160
160
161
161 class largefilesdirstate(dirstate.dirstate):
162 class largefilesdirstate(dirstate.dirstate):
162 def __getitem__(self, key):
163 def __getitem__(self, key):
163 return super(largefilesdirstate, self).__getitem__(unixpath(key))
164 return super(largefilesdirstate, self).__getitem__(unixpath(key))
164
165
165 def set_tracked(self, f):
166 def set_tracked(self, f):
166 return super(largefilesdirstate, self).set_tracked(unixpath(f))
167 return super(largefilesdirstate, self).set_tracked(unixpath(f))
167
168
168 def set_untracked(self, f):
169 def set_untracked(self, f):
169 return super(largefilesdirstate, self).set_untracked(unixpath(f))
170 return super(largefilesdirstate, self).set_untracked(unixpath(f))
170
171
171 def normal(self, f, parentfiledata=None):
172 def normal(self, f, parentfiledata=None):
172 # It is unclear whether `parentfiledata` should be passed down or
173 # It is unclear whether `parentfiledata` should be passed down or
173 # thrown away, so it is thrown away to stay on the safe side.
174 # thrown away, so it is thrown away to stay on the safe side.
174 return super(largefilesdirstate, self).normal(unixpath(f))
175 return super(largefilesdirstate, self).normal(unixpath(f))
175
176
176 def remove(self, f):
177 def remove(self, f):
177 return super(largefilesdirstate, self).remove(unixpath(f))
178 return super(largefilesdirstate, self).remove(unixpath(f))
178
179
179 def add(self, f):
180 def add(self, f):
180 return super(largefilesdirstate, self).add(unixpath(f))
181 return super(largefilesdirstate, self).add(unixpath(f))
181
182
182 def drop(self, f):
183 def drop(self, f):
183 return super(largefilesdirstate, self).drop(unixpath(f))
184 return super(largefilesdirstate, self).drop(unixpath(f))
184
185
185 def forget(self, f):
186 def forget(self, f):
186 return super(largefilesdirstate, self).forget(unixpath(f))
187 return super(largefilesdirstate, self).forget(unixpath(f))
187
188
188 def normallookup(self, f):
189 def normallookup(self, f):
189 return super(largefilesdirstate, self).normallookup(unixpath(f))
190 return super(largefilesdirstate, self).normallookup(unixpath(f))
190
191
191 def _ignore(self, f):
192 def _ignore(self, f):
192 return False
193 return False
193
194
194 def write(self, tr):
195 def write(self, tr):
195 # (1) disable PENDING mode always
196 # (1) disable PENDING mode always
196 # (lfdirstate isn't yet managed as a part of the transaction)
197 # (lfdirstate isn't yet managed as a part of the transaction)
197 # (2) avoid develwarn 'use dirstate.write with ....'
198 # (2) avoid develwarn 'use dirstate.write with ....'
198 if tr:
199 if tr:
199 tr.addbackup(b'largefiles/dirstate', location=b'plain')
200 tr.addbackup(b'largefiles/dirstate', location=b'plain')
200 super(largefilesdirstate, self).write(None)
201 super(largefilesdirstate, self).write(None)
201
202
202
203
203 def openlfdirstate(ui, repo, create=True):
204 def openlfdirstate(ui, repo, create=True):
204 """
205 """
205 Return a dirstate object that tracks largefiles: i.e. its root is
206 Return a dirstate object that tracks largefiles: i.e. its root is
206 the repo root, but it is saved in .hg/largefiles/dirstate.
207 the repo root, but it is saved in .hg/largefiles/dirstate.
207 """
208 """
208 vfs = repo.vfs
209 vfs = repo.vfs
209 lfstoredir = longname
210 lfstoredir = longname
210 opener = vfsmod.vfs(vfs.join(lfstoredir))
211 opener = vfsmod.vfs(vfs.join(lfstoredir))
211 use_dirstate_v2 = requirements.DIRSTATE_V2_REQUIREMENT in repo.requirements
212 use_dirstate_v2 = requirements.DIRSTATE_V2_REQUIREMENT in repo.requirements
212 lfdirstate = largefilesdirstate(
213 lfdirstate = largefilesdirstate(
213 opener,
214 opener,
214 ui,
215 ui,
215 repo.root,
216 repo.root,
216 repo.dirstate._validate,
217 repo.dirstate._validate,
217 lambda: sparse.matcher(repo),
218 lambda: sparse.matcher(repo),
218 repo.nodeconstants,
219 repo.nodeconstants,
219 use_dirstate_v2,
220 use_dirstate_v2,
220 )
221 )
221
222
222 # If the largefiles dirstate does not exist, populate and create
223 # If the largefiles dirstate does not exist, populate and create
223 # it. This ensures that we create it on the first meaningful
224 # it. This ensures that we create it on the first meaningful
224 # largefiles operation in a new clone.
225 # largefiles operation in a new clone.
225 if create and not vfs.exists(vfs.join(lfstoredir, b'dirstate')):
226 if create and not vfs.exists(vfs.join(lfstoredir, b'dirstate')):
226 matcher = getstandinmatcher(repo)
227 matcher = getstandinmatcher(repo)
227 standins = repo.dirstate.walk(
228 standins = repo.dirstate.walk(
228 matcher, subrepos=[], unknown=False, ignored=False
229 matcher, subrepos=[], unknown=False, ignored=False
229 )
230 )
230
231
231 if len(standins) > 0:
232 if len(standins) > 0:
232 vfs.makedirs(lfstoredir)
233 vfs.makedirs(lfstoredir)
233
234
234 with lfdirstate.parentchange():
235 with lfdirstate.parentchange():
235 for standin in standins:
236 for standin in standins:
236 lfile = splitstandin(standin)
237 lfile = splitstandin(standin)
237 lfdirstate.update_file(
238 lfdirstate.update_file(
238 lfile, p1_tracked=True, wc_tracked=True, possibly_dirty=True
239 lfile, p1_tracked=True, wc_tracked=True, possibly_dirty=True
239 )
240 )
240 return lfdirstate
241 return lfdirstate
241
242
242
243
243 def lfdirstatestatus(lfdirstate, repo):
244 def lfdirstatestatus(lfdirstate, repo):
244 pctx = repo[b'.']
245 pctx = repo[b'.']
245 match = matchmod.always()
246 match = matchmod.always()
246 unsure, s = lfdirstate.status(
247 unsure, s, mtime_boundary = lfdirstate.status(
247 match, subrepos=[], ignored=False, clean=False, unknown=False
248 match, subrepos=[], ignored=False, clean=False, unknown=False
248 )
249 )
249 modified, clean = s.modified, s.clean
250 modified, clean = s.modified, s.clean
251 wctx = repo[None]
250 for lfile in unsure:
252 for lfile in unsure:
251 try:
253 try:
252 fctx = pctx[standin(lfile)]
254 fctx = pctx[standin(lfile)]
253 except LookupError:
255 except LookupError:
254 fctx = None
256 fctx = None
255 if not fctx or readasstandin(fctx) != hashfile(repo.wjoin(lfile)):
257 if not fctx or readasstandin(fctx) != hashfile(repo.wjoin(lfile)):
256 modified.append(lfile)
258 modified.append(lfile)
257 else:
259 else:
258 clean.append(lfile)
260 clean.append(lfile)
259 lfdirstate.set_clean(lfile)
261 st = wctx[lfile].lstat()
262 mode = st.st_mode
263 size = st.st_size
264 mtime = timestamp.reliable_mtime_of(st, mtime_boundary)
265 if mtime is not None:
266 cache_data = (mode, size, mtime)
267 lfdirstate.set_clean(lfile, cache_data)
260 return s
268 return s
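
# Descriptive note on the hunk above (not in the original source): status()
# now returns a three-tuple whose last element is an mtime boundary, and
# timestamp.reliable_mtime_of() returns None for files whose mtime is too
# close to that boundary to be trusted; such files simply skip the
# (mode, size, mtime) cache update and are re-examined as "unsure" on the
# next status run.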


def listlfiles(repo, rev=None, matcher=None):
    """return a list of largefiles in the working copy or the
    specified changeset"""

    if matcher is None:
        matcher = getstandinmatcher(repo)

    # ignore unknown files in working directory
    return [
        splitstandin(f)
        for f in repo[rev].walk(matcher)
        if rev is not None or repo.dirstate.get_entry(f).any_tracked
    ]


def instore(repo, hash, forcelocal=False):
    '''Return true if a largefile with the given hash exists in the store'''
    return os.path.exists(storepath(repo, hash, forcelocal))


def storepath(repo, hash, forcelocal=False):
    """Return the correct location in the repository largefiles store for a
    file with the given hash."""
    if not forcelocal and repo.shared():
        return repo.vfs.reljoin(repo.sharedpath, longname, hash)
    return repo.vfs.join(longname, hash)


def findstorepath(repo, hash):
    """Search through the local store path(s) to find the file for the given
    hash. If the file is not found, its path in the primary store is returned.
    The return value is a tuple of (path, exists(path)).
    """
    # For shared repos, the primary store is in the share source. But for
    # backward compatibility, force a lookup in the local store if it wasn't
    # found in the share source.
    path = storepath(repo, hash, False)

    if instore(repo, hash):
        return (path, True)
    elif repo.shared() and instore(repo, hash, True):
        return storepath(repo, hash, True), True

    return (path, False)


def copyfromcache(repo, hash, filename):
    """Copy the specified largefile from the repo or system cache to
    filename in the repository. Return true on success or false if the
    file was not found in either cache (which should not have happened:
    this is meant to be called only after ensuring that the needed
    largefile exists in the cache)."""
    wvfs = repo.wvfs
    path = findfile(repo, hash)
    if path is None:
        return False
    wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
    # The write may fail before the file is fully written, but we
    # don't use atomic writes in the working copy.
    with open(path, b'rb') as srcfd, wvfs(filename, b'wb') as destfd:
        gothash = copyandhash(util.filechunkiter(srcfd), destfd)
    if gothash != hash:
        repo.ui.warn(
            _(b'%s: data corruption in %s with hash %s\n')
            % (filename, path, gothash)
        )
        wvfs.unlink(filename)
        return False
    return True


def copytostore(repo, ctx, file, fstandin):
    wvfs = repo.wvfs
    hash = readasstandin(ctx[fstandin])
    if instore(repo, hash):
        return
    if wvfs.exists(file):
        copytostoreabsolute(repo, wvfs.join(file), hash)
    else:
        repo.ui.warn(
            _(b"%s: largefile %s not available from local store\n")
            % (file, hash)
        )


def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''

    ctx = repo[node]
    for filename in ctx.files():
        realfile = splitstandin(filename)
        if realfile is not None and filename in ctx.manifest():
            copytostore(repo, ctx, realfile, filename)


def copytostoreabsolute(repo, file, hash):
    if inusercache(repo.ui, hash):
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
    else:
        util.makedirs(os.path.dirname(storepath(repo, hash)))
        with open(file, b'rb') as srcf:
            with util.atomictempfile(
                storepath(repo, hash), createmode=repo.store.createmode
            ) as dstf:
                for chunk in util.filechunkiter(srcf):
                    dstf.write(chunk)
        linktousercache(repo, hash)


def linktousercache(repo, hash):
    """Link / copy the largefile with the specified hash from the store
    to the cache."""
    path = usercachepath(repo.ui, hash)
    link(storepath(repo, hash), path)


def getstandinmatcher(repo, rmatcher=None):
    '''Return a match object that applies rmatcher to the standin directory'''
    wvfs = repo.wvfs
    standindir = shortname

    # no warnings about missing files or directories
    badfn = lambda f, msg: None

    if rmatcher and not rmatcher.always():
        pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
        if not pats:
            pats = [wvfs.join(standindir)]
        match = scmutil.match(repo[None], pats, badfn=badfn)
    else:
        # no patterns: relative to repo root
        match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
    return match


def composestandinmatcher(repo, rmatcher):
    """Return a matcher that accepts standins corresponding to the
    files accepted by rmatcher. Pass the list of files in the matcher
    as the paths specified by the user."""
    smatcher = getstandinmatcher(repo, rmatcher)
    isstandin = smatcher.matchfn

    def composedmatchfn(f):
        return isstandin(f) and rmatcher.matchfn(splitstandin(f))

    smatcher.matchfn = composedmatchfn

    return smatcher


def standin(filename):
    """Return the repo-relative path to the standin for the specified big
    file."""
    # Notes:
    # 1) Some callers want an absolute path, but for instance addlargefiles
    #    needs it repo-relative so it can be passed to repo[None].add(). So
    #    leave it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows. Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    return shortnameslash + util.pconvert(filename)


def isstandin(filename):
    """Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated)."""
    return filename.startswith(shortnameslash)


def splitstandin(filename):
    # Split on / because that's what dirstate always uses, even on Windows.
    # Change local separator to / first just in case we are passed filenames
    # from an external source (like the command line).
    bits = util.pconvert(filename).split(b'/', 1)
    if len(bits) == 2 and bits[0] == shortname:
        return bits[1]
    else:
        return None
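
# Round-trip sketch (added for illustration; assumes the extension's usual
# shortname of b'.hglf'):
#
#     standin(b'data/big.bin')             -> b'.hglf/data/big.bin'
#     isstandin(b'.hglf/data/big.bin')     -> True
#     splitstandin(b'.hglf/data/big.bin')  -> b'data/big.bin'
#     splitstandin(b'data/big.bin')        -> None (not under the standin dir)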


def updatestandin(repo, lfile, standin):
    """Re-calculate hash value of lfile and write it into standin

    This assumes that "lfutil.standin(lfile) == standin", for efficiency.
    """
    file = repo.wjoin(lfile)
    if repo.wvfs.exists(lfile):
        hash = hashfile(file)
        executable = getexecutable(file)
        writestandin(repo, standin, hash, executable)
    else:
        raise error.Abort(_(b'%s: file not found!') % lfile)


def readasstandin(fctx):
    """Read the hex hash from the given filectx of a standin file.

    This encapsulates how "standin" data is stored in the storage layer."""
    return fctx.data().strip()


def writestandin(repo, standin, hash, executable):
    '''write hash to <repo.root>/<standin>'''
    repo.wwrite(standin, hash + b'\n', executable and b'x' or b'')


def copyandhash(instream, outfile):
    """Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Return the hash."""
    hasher = hashutil.sha1(b'')
    for data in instream:
        hasher.update(data)
        outfile.write(data)
    return hex(hasher.digest())


def hashfile(file):
    if not os.path.exists(file):
        return b''
    with open(file, b'rb') as fd:
        return hexsha1(fd)


def getexecutable(filename):
    mode = os.stat(filename).st_mode
    return (
        (mode & stat.S_IXUSR)
        and (mode & stat.S_IXGRP)
        and (mode & stat.S_IXOTH)
    )


def urljoin(first, second, *arg):
    def join(left, right):
        if not left.endswith(b'/'):
            left += b'/'
        if right.startswith(b'/'):
            right = right[1:]
        return left + right

    url = join(first, second)
    for a in arg:
        url = join(url, a)
    return url
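
# Joining sketch (added for illustration): exactly one b'/' separates each
# segment, regardless of stray leading or trailing slashes on the inputs.
#
#     urljoin(b'http://host/base/', b'/store', b'abc123')
#     -> b'http://host/base/store/abc123'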


def hexsha1(fileobj):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the given
    file-like object"""
    h = hashutil.sha1()
    for chunk in util.filechunkiter(fileobj):
        h.update(chunk)
    return hex(h.digest())
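
# Usage sketch (added for illustration): any binary file-like object works,
# e.g. the SHA-1 of empty input via io.BytesIO:
#
#     import io
#     hexsha1(io.BytesIO(b''))
#     -> b'da39a3ee5e6b4b0d3255bfef95601890afd80709'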


def httpsendfile(ui, filename):
    return httpconnection.httpsendfile(ui, filename, b'rb')


def unixpath(path):
    '''Return a version of path normalized for use with the lfdirstate.'''
    return util.pconvert(os.path.normpath(path))


def islfilesrepo(repo):
    '''Return true if the repo is a largefile repo.'''
    if b'largefiles' in repo.requirements and any(
        shortnameslash in f[1] for f in repo.store.datafiles()
    ):
        return True

    return any(openlfdirstate(repo.ui, repo, False))


class storeprotonotcapable(Exception):
    def __init__(self, storetypes):
        self.storetypes = storetypes


def getstandinsstate(repo):
    standins = []
    matcher = getstandinmatcher(repo)
    wctx = repo[None]
    for standin in repo.dirstate.walk(
        matcher, subrepos=[], unknown=False, ignored=False
    ):
        lfile = splitstandin(standin)
        try:
            hash = readasstandin(wctx[standin])
        except IOError:
            hash = None
        standins.append((lfile, hash))
    return standins


def synclfdirstate(repo, lfdirstate, lfile, normallookup):
    lfstandin = standin(lfile)
    if lfstandin not in repo.dirstate:
        lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=False)
    else:
        entry = repo.dirstate.get_entry(lfstandin)
        lfdirstate.update_file(
            lfile,
            wc_tracked=entry.tracked,
            p1_tracked=entry.p1_tracked,
            p2_info=entry.p2_info,
            possibly_dirty=True,
        )


def markcommitted(orig, ctx, node):
    repo = ctx.repo()

    lfdirstate = openlfdirstate(repo.ui, repo)
    with lfdirstate.parentchange():
        orig(node)

        # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
        # because files coming from the 2nd parent are omitted in the latter.
        #
        # The former should be used to get targets of "synclfdirstate",
        # because such files:
        # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
        # - have to be marked as "n" after commit, but
        # - aren't listed in "repo[node].files()"

        for f in ctx.files():
            lfile = splitstandin(f)
            if lfile is not None:
                synclfdirstate(repo, lfdirstate, lfile, False)
        lfdirstate.write(repo.currenttransaction())

    # As part of committing, copy all of the largefiles into the cache.
    #
    # Using "node" instead of "ctx" implies an additional "repo[node]"
    # lookup inside copyalltostore(), but lets us omit the redundant check
    # for files coming from the 2nd parent, which should already exist in
    # the store after a merge.
    copyalltostore(repo, node)


def getlfilestoupdate(oldstandins, newstandins):
    changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
    filelist = []
    for f in changedstandins:
        if f[0] not in filelist:
            filelist.append(f[0])
    return filelist
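
# Worked example (added for illustration): standins are (lfile, hash) pairs,
# so a changed hash puts both the old and the new pair in the symmetric
# difference, but the file is listed only once:
#
#     old = [(b'a.bin', b'1111'), (b'b.bin', b'2222')]
#     new = [(b'a.bin', b'3333'), (b'b.bin', b'2222')]
#     getlfilestoupdate(old, new)  -> [b'a.bin']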


def getlfilestoupload(repo, missing, addfunc):
    makeprogress = repo.ui.makeprogress
    with makeprogress(
        _(b'finding outgoing largefiles'),
        unit=_(b'revisions'),
        total=len(missing),
    ) as progress:
        for i, n in enumerate(missing):
            progress.update(i)
            parents = [p for p in repo[n].parents() if p != repo.nullid]

            with lfstatus(repo, value=False):
                ctx = repo[n]

            files = set(ctx.files())
            if len(parents) == 2:
                mc = ctx.manifest()
                mp1 = ctx.p1().manifest()
                mp2 = ctx.p2().manifest()
                for f in mp1:
                    if f not in mc:
                        files.add(f)
                for f in mp2:
                    if f not in mc:
                        files.add(f)
                for f in mc:
                    if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                        files.add(f)
            for fn in files:
                if isstandin(fn) and fn in ctx:
                    addfunc(fn, readasstandin(ctx[fn]))


def updatestandinsbymatch(repo, match):
    """Update standins in the working directory according to the specified
    match.

    This returns a (possibly modified) ``match`` object to be used for
    the subsequent commit process.
    """

    ui = repo.ui

    # Case 1: user calls commit with no specific files or
    # include/exclude patterns: refresh and commit all files that
    # are "dirty".
    if match is None or match.always():
        # Spend a bit of time here to get a list of files we know
        # are modified so we can compare only against those.
        # It can cost a lot of time (several seconds)
        # otherwise to update all standins if the largefiles are
        # large.
        lfdirstate = openlfdirstate(ui, repo)
        dirtymatch = matchmod.always()
-        unsure, s = lfdirstate.status(
+        unsure, s, mtime_boundary = lfdirstate.status(
            dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False
        )
        modifiedfiles = unsure + s.modified + s.added + s.removed
        lfiles = listlfiles(repo)
        # this only loops through largefiles that exist (not
        # removed/renamed)
        for lfile in lfiles:
            if lfile in modifiedfiles:
                fstandin = standin(lfile)
                if repo.wvfs.exists(fstandin):
                    # this handles the case where a rebase is being
                    # performed and the working copy is not updated
                    # yet.
                    if repo.wvfs.exists(lfile):
                        updatestandin(repo, lfile, fstandin)

        return match

    lfiles = listlfiles(repo)
    match._files = repo._subdirlfs(match.files(), lfiles)

    # Case 2: user calls commit with specified patterns: refresh
    # any matching big files.
    smatcher = composestandinmatcher(repo, match)
    standins = repo.dirstate.walk(
        smatcher, subrepos=[], unknown=False, ignored=False
    )

    # No matching big files: get out of the way and pass control to
    # the usual commit() method.
    if not standins:
        return match

    # Refresh all matching big files. It's possible that the
    # commit will end up failing, in which case the big files will
    # stay refreshed. No harm done: the user modified them and
    # asked to commit them, so sooner or later we're going to
    # refresh the standins. Might as well leave them refreshed.
    lfdirstate = openlfdirstate(ui, repo)
    for fstandin in standins:
        lfile = splitstandin(fstandin)
        if lfdirstate.get_entry(lfile).tracked:
            updatestandin(repo, lfile, fstandin)

    # Cook up a new matcher that only matches regular files or
    # standins corresponding to the big files requested by the
    # user. Have to modify _files to prevent commit() from
    # complaining "not tracked" for big files.
    match = copy.copy(match)
    origmatchfn = match.matchfn

    # Check both the list of largefiles and the list of
    # standins because if a largefile was removed, it
    # won't be in the list of largefiles at this point
    match._files += sorted(standins)

    actualfiles = []
    for f in match._files:
        fstandin = standin(f)

        # For largefiles, only one of the normal and standin should be
        # committed (except if one of them is a remove). In the case of a
        # standin removal, drop the normal file if it is unknown to dirstate.
        # Thus, skip plain largefile names but keep the standin.
        if f in lfiles or fstandin in standins:
            if not repo.dirstate.get_entry(fstandin).removed:
                if not repo.dirstate.get_entry(f).removed:
                    continue
            elif not repo.dirstate.get_entry(f).any_tracked:
                continue

        actualfiles.append(f)
    match._files = actualfiles

    def matchfn(f):
        if origmatchfn(f):
            return f not in lfiles
        else:
            return f in standins

    match.matchfn = matchfn

    return match


class automatedcommithook(object):
    """Stateful hook to update standins at the first commit after resuming

    For efficiency, updating standins in the working directory should
    be avoided during automated committing (like rebase, transplant and
    so on), because they should already have been updated before committing.

    But the first commit after resuming automated committing (e.g. ``rebase
    --continue``) should update them, because largefiles may have been
    modified manually.
    """

    def __init__(self, resuming):
        self.resuming = resuming

    def __call__(self, repo, match):
        if self.resuming:
            self.resuming = False  # avoids updating at subsequent commits
            return updatestandinsbymatch(repo, match)
        else:
            return match


def getstatuswriter(ui, repo, forcibly=None):
    """Return the function to write largefiles-specific status out

    If ``forcibly`` is ``None``, this returns the last element of
    ``repo._lfstatuswriters`` as "default" writer function.

    Otherwise, this returns the function to always write out (or
    ignore if ``not forcibly``) status.
    """
    if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
        return repo._lfstatuswriters[-1]
    else:
        if forcibly:
            return ui.status  # forcibly WRITE OUT
        else:
            return lambda *msg, **opts: None  # forcibly IGNORE
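
# Behaviour sketch (added for illustration): forcibly=True yields ui.status,
# forcibly=False yields a no-op writer, and forcibly=None defers to the
# writer currently on repo._lfstatuswriters when the extension is enabled.
#
#     writer = getstatuswriter(ui, repo, forcibly=True)
#     writer(b'3 largefiles updated\n')  # goes through ui.status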
@@ -1,1857 +1,1866 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''Overridden Mercurial commands and functions for the largefiles extension'''
from __future__ import absolute_import

import copy
import os

from mercurial.i18n import _

from mercurial.pycompat import open

from mercurial.hgweb import webcommands

from mercurial import (
    archival,
    cmdutil,
    copies as copiesmod,
    error,
    exchange,
    extensions,
    exthelper,
    filemerge,
    hg,
    logcmdutil,
    match as matchmod,
    merge,
    mergestate as mergestatemod,
    pathutil,
    pycompat,
    scmutil,
    smartset,
    subrepo,
    url as urlmod,
    util,
)

from mercurial.upgrade_utils import (
    actions as upgrade_actions,
)

from . import (
    lfcommands,
    lfutil,
    storefactory,
)

+ACTION_ADD = mergestatemod.ACTION_ADD
+ACTION_DELETED_CHANGED = mergestatemod.ACTION_DELETED_CHANGED
+ACTION_GET = mergestatemod.ACTION_GET
+ACTION_KEEP = mergestatemod.ACTION_KEEP
+ACTION_REMOVE = mergestatemod.ACTION_REMOVE
+
eh = exthelper.exthelper()

lfstatus = lfutil.lfstatus

-MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'lfmr'
+MERGE_ACTION_LARGEFILE_MARK_REMOVED = mergestatemod.MergeAction('lfmr')

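# Descriptive note on the hunk above (not in the original source): the new
# side replaces the raw b'lfmr' byte string with a mergestatemod.MergeAction
# instance, in line with the ACTION_* aliases it re-exports above.
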
# -- Utility functions: commonly/repeatedly needed functionality ---------------


def composelargefilematcher(match, manifest):
    """create a matcher that matches only the largefiles in the original
    matcher"""
    m = copy.copy(match)
    lfile = lambda f: lfutil.standin(f) in manifest
    m._files = [lf for lf in m._files if lfile(lf)]
    m._fileset = set(m._files)
    m.always = lambda: False
    origmatchfn = m.matchfn
    m.matchfn = lambda f: lfile(f) and origmatchfn(f)
    return m
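
# Matching sketch (added for illustration; hypothetical file names): assuming
# the original matcher accepts both names, and the manifest contains the
# standin b'.hglf/big.bin' but no standin for b'small.txt':
#
#     m = composelargefilematcher(match, repo[None].manifest())
#     m(b'big.bin')    -> True   (its standin is in the manifest)
#     m(b'small.txt')  -> False  (no standin, so it is filtered out)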


def composenormalfilematcher(match, manifest, exclude=None):
    excluded = set()
    if exclude is not None:
        excluded.update(exclude)

    m = copy.copy(match)
    notlfile = lambda f: not (
        lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
    )
    m._files = [lf for lf in m._files if notlfile(lf)]
    m._fileset = set(m._files)
    m.always = lambda: False
    origmatchfn = m.matchfn
    m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
    return m


def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
    large = opts.get('large')
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.get('lfsize')
    )

    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, b'patterns')
        if lfpats:
            lfmatcher = matchmod.match(repo.root, b'', list(lfpats))

    lfnames = []
    m = matcher

    wctx = repo[None]
    for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            # In case the file was removed previously, but not committed
            # (issue3507)
            if not repo.wvfs.exists(f):
                continue

            abovemin = (
                lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
            )
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))

    bad = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    with repo.wlock():
        if not opts.get('dry_run'):
            standins = []
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                lfutil.writestandin(
                    repo,
                    standinname,
                    hash=b'',
                    executable=lfutil.getexecutable(repo.wjoin(f)),
                )
                standins.append(standinname)
                lfdirstate.set_tracked(f)
            lfdirstate.write(repo.currenttransaction())
            bad += [
                lfutil.splitstandin(f)
                for f in repo[None].add(standins)
                if f in m.files()
            ]

    added = [f for f in lfnames if f not in bad]
    return added, bad
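
# Threshold sketch (added for illustration): with the documented default
# minimum size of 10 megabytes, a 12 MiB file qualifies even without --large:
#
#     lfsize = 10
#     st_size = 12 * 1024 * 1024          # hypothetical working-copy file
#     st_size >= lfsize * 1024 * 1024     -> True, so it is added as a
#     largefile (as is anything matching the configured largefiles.patterns)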


def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
    after = opts.get('after')
    m = composelargefilematcher(matcher, repo[None].manifest())
    with lfstatus(repo):
        s = repo.status(match=m, clean=not isaddremove)
    manifest = repo[None].manifest()
    modified, added, deleted, clean = [
        [f for f in list if lfutil.standin(f) in manifest]
        for list in (s.modified, s.added, s.deleted, s.clean)
    ]

    def warn(files, msg):
        for f in files:
            ui.warn(msg % uipathfn(f))
        return int(len(files) > 0)

    if after:
        remove = deleted
        result = warn(
            modified + added + clean, _(b'not removing %s: file still exists\n')
        )
    else:
        remove = deleted + clean
        result = warn(
            modified,
            _(
                b'not removing %s: file is modified (use -f'
                b' to force removal)\n'
            ),
        )
        result = (
            warn(
                added,
                _(
                    b'not removing %s: file has been marked for add'
                    b' (use forget to undo)\n'
                ),
            )
            or result
        )

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in sorted(remove):
            if ui.verbose or not m.exact(f):
                ui.status(_(b'removing %s\n') % uipathfn(f))

            if not dryrun:
                if not after:
                    repo.wvfs.unlinkpath(f, ignoremissing=True)

        if dryrun:
            return result

        remove = [lfutil.standin(f) for f in remove]
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not isaddremove:
            for f in remove:
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(remove)

        for f in remove:
            lfdirstate.set_untracked(lfutil.splitstandin(f))

        lfdirstate.write(repo.currenttransaction())

    return result
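
# Exit-status sketch (added for illustration): warn() returns 1 when it
# printed at least one warning and 0 otherwise, so `result` is non-zero
# exactly when some file could not be removed; the wrapped remove command
# then presumably propagates it as its return value.
#
#     warn([], msg)          -> 0
#     warn([b'f.bin'], msg)  -> 1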
235
241
236
242
237 # For overriding mercurial.hgweb.webcommands so that largefiles will
243 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 # appear at their right place in the manifests.
244 # appear at their right place in the manifests.
239 @eh.wrapfunction(webcommands, b'decodepath')
245 @eh.wrapfunction(webcommands, b'decodepath')
240 def decodepath(orig, path):
246 def decodepath(orig, path):
241 return lfutil.splitstandin(path) or path
247 return lfutil.splitstandin(path) or path
242
248
243
249
244 # -- Wrappers: modify existing commands --------------------------------
250 # -- Wrappers: modify existing commands --------------------------------
245
251
246
252
247 @eh.wrapcommand(
253 @eh.wrapcommand(
248 b'add',
254 b'add',
249 opts=[
255 opts=[
250 (b'', b'large', None, _(b'add as largefile')),
256 (b'', b'large', None, _(b'add as largefile')),
251 (b'', b'normal', None, _(b'add as normal file')),
257 (b'', b'normal', None, _(b'add as normal file')),
252 (
258 (
253 b'',
259 b'',
254 b'lfsize',
260 b'lfsize',
255 b'',
261 b'',
256 _(
262 _(
257 b'add all files above this size (in megabytes) '
263 b'add all files above this size (in megabytes) '
258 b'as largefiles (default: 10)'
264 b'as largefiles (default: 10)'
259 ),
265 ),
260 ),
266 ),
261 ],
267 ],
262 )
268 )
263 def overrideadd(orig, ui, repo, *pats, **opts):
269 def overrideadd(orig, ui, repo, *pats, **opts):
264 if opts.get('normal') and opts.get('large'):
270 if opts.get('normal') and opts.get('large'):
265 raise error.Abort(_(b'--normal cannot be used with --large'))
271 raise error.Abort(_(b'--normal cannot be used with --large'))
266 return orig(ui, repo, *pats, **opts)
272 return orig(ui, repo, *pats, **opts)
267
273
268
274
269 @eh.wrapfunction(cmdutil, b'add')
275 @eh.wrapfunction(cmdutil, b'add')
270 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
276 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
271 # The --normal flag short circuits this override
277 # The --normal flag short circuits this override
272 if opts.get('normal'):
278 if opts.get('normal'):
273 return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
279 return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
274
280
275 ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
281 ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
276 normalmatcher = composenormalfilematcher(
282 normalmatcher = composenormalfilematcher(
277 matcher, repo[None].manifest(), ladded
283 matcher, repo[None].manifest(), ladded
278 )
284 )
279 bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
285 bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
280
286
281 bad.extend(f for f in lbad)
287 bad.extend(f for f in lbad)
282 return bad
288 return bad
283
289
284
290
285 @eh.wrapfunction(cmdutil, b'remove')
291 @eh.wrapfunction(cmdutil, b'remove')
286 def cmdutilremove(
292 def cmdutilremove(
287 orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
293 orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
288 ):
294 ):
289 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
295 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
290 result = orig(
296 result = orig(
291 ui,
297 ui,
292 repo,
298 repo,
293 normalmatcher,
299 normalmatcher,
294 prefix,
300 prefix,
295 uipathfn,
301 uipathfn,
296 after,
302 after,
297 force,
303 force,
298 subrepos,
304 subrepos,
299 dryrun,
305 dryrun,
300 )
306 )
301 return (
307 return (
302 removelargefiles(
308 removelargefiles(
303 ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
309 ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
304 )
310 )
305 or result
311 or result
306 )
312 )
307
313
308
314
309 @eh.wrapfunction(subrepo.hgsubrepo, b'status')
315 @eh.wrapfunction(subrepo.hgsubrepo, b'status')
310 def overridestatusfn(orig, repo, rev2, **opts):
316 def overridestatusfn(orig, repo, rev2, **opts):
311 with lfstatus(repo._repo):
317 with lfstatus(repo._repo):
312 return orig(repo, rev2, **opts)
318 return orig(repo, rev2, **opts)
313
319
314
320
315 @eh.wrapcommand(b'status')
321 @eh.wrapcommand(b'status')
316 def overridestatus(orig, ui, repo, *pats, **opts):
322 def overridestatus(orig, ui, repo, *pats, **opts):
317 with lfstatus(repo):
323 with lfstatus(repo):
318 return orig(ui, repo, *pats, **opts)
324 return orig(ui, repo, *pats, **opts)
319
325
320
326
321 @eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
327 @eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
322 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
328 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
323 with lfstatus(repo._repo):
329 with lfstatus(repo._repo):
324 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
330 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
325
331
326
332
@eh.wrapcommand(b'log')
def overridelog(orig, ui, repo, *pats, **opts):
    def overridematchandpats(
        orig,
        ctx,
        pats=(),
        opts=None,
        globbed=False,
        default=b'relpath',
        badfn=None,
    ):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        if opts is None:
            opts = {}
        matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn)
        m, p = copy.copy(matchandpats)

        if m.always():
            # We want to match everything anyway, so there's no benefit trying
            # to add standins.
            return matchandpats

        pats = set(p)

        def fixpats(pat, tostandin=lfutil.standin):
            if pat.startswith(b'set:'):
                return pat

            kindpat = matchmod._patsplit(pat, None)

            if kindpat[0] is not None:
                return kindpat[0] + b':' + tostandin(kindpat[1])
            return tostandin(kindpat[1])

        cwd = repo.getcwd()
        if cwd:
            hglf = lfutil.shortname
            back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])

            def tostandin(f):
                # The file may already be a standin, so truncate the back
                # prefix and test before mangling it.  This avoids turning
                # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
                if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
                    return f

                # An absolute path is from outside the repo, so truncate the
                # path to the root before building the standin.  Otherwise cwd
                # is somewhere in the repo, relative to root, and needs to be
                # prepended before building the standin.
                if os.path.isabs(cwd):
                    f = f[len(back) :]
                else:
                    f = cwd + b'/' + f
                return back + lfutil.standin(f)

        else:

            def tostandin(f):
                if lfutil.isstandin(f):
                    return f
                return lfutil.standin(f)

        pats.update(fixpats(f, tostandin) for f in p)

        for i in range(0, len(m._files)):
            # Don't add '.hglf' to m.files, since that is already covered by '.'
            if m._files[i] == b'.':
                continue
            standin = lfutil.standin(m._files[i])
            # If the "standin" is a directory, append instead of replace to
            # support naming a directory on the command line with only
            # largefiles.  The original directory is kept to support normal
            # files.
            if standin in ctx:
                m._files[i] = standin
            elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
                m._files.append(standin)

        m._fileset = set(m._files)
        m.always = lambda: False
        origmatchfn = m.matchfn

        def lfmatchfn(f):
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r

        m.matchfn = lfmatchfn

        ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
        return m, pats

    # For hg log --patch, the match object is used in two different senses:
    # (1) to determine what revisions should be printed out, and
    # (2) to determine what files to print out diffs for.
    # The magic matchandpats override should be used for case (1) but not for
    # case (2).
    oldmatchandpats = scmutil.matchandpats

    def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
        wctx = repo[None]
        match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
        return lambda ctx: match

    wrappedmatchandpats = extensions.wrappedfunction(
        scmutil, b'matchandpats', overridematchandpats
    )
    wrappedmakefilematcher = extensions.wrappedfunction(
        logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
    )
    with wrappedmatchandpats, wrappedmakefilematcher:
        return orig(ui, repo, *pats, **opts)


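# overridelog leans on extensions.wrappedfunction(), which patches a module
# attribute and undoes the patch when the context manager exits.  This is a
# minimal sketch of that mechanism in plain Python, not Mercurial's actual
# implementation; `mod` and `shout` are made-up demo names.

import contextlib
import types


@contextlib.contextmanager
def wrappedfunction(container, name, wrapper):
    """Replace container.name with wrapper(orig, ...); restore on exit."""
    origfn = getattr(container, name)

    def wrapped(*args, **kwargs):
        return wrapper(origfn, *args, **kwargs)

    setattr(container, name, wrapped)
    try:
        yield
    finally:
        setattr(container, name, origfn)


mod = types.SimpleNamespace(greet=lambda who: 'hello %s' % who)


def shout(orig, who):
    return orig(who).upper()


with wrappedfunction(mod, 'greet', shout):
    assert mod.greet('world') == 'HELLO WORLD'
assert mod.greet('world') == 'hello world'  # original restored

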
@eh.wrapcommand(
    b'verify',
    opts=[
        (
            b'',
            b'large',
            None,
            _(b'verify that all largefiles in the current revision exist'),
        ),
        (
            b'',
            b'lfa',
            None,
            _(b'verify largefiles in all revisions, not just current'),
        ),
        (
            b'',
            b'lfc',
            None,
            _(b'verify local largefile contents, not just existence'),
        ),
    ],
)
def overrideverify(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    all = opts.pop('lfa', False)
    contents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if large or all or contents:
        result = result or lfcommands.verifylfiles(ui, repo, all, contents)
    return result


@eh.wrapcommand(
    b'debugstate',
    opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
)
def overridedebugstate(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    if large:

        class fakerepo(object):
            dirstate = lfutil.openlfdirstate(ui, repo)

        orig(ui, fakerepo, *pats, **opts)
    else:
        orig(ui, repo, *pats, **opts)


# Before starting the manifest merge, merge.updates will call
# _checkunknownfile to check if there are any files in the merged-in
# changeset that collide with unknown files in the working copy.
#
# The largefiles are seen as unknown, so this prevents us from merging
# in a file 'foo' if we already have a largefile with the same name.
#
# The overridden function filters the unknown files by removing any
# largefiles. This makes the merge proceed and we can then handle this
# case further in the overridden calculateupdates function below.
@eh.wrapfunction(merge, b'_checkunknownfile')
def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
    if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
        return False
    return origfn(repo, wctx, mctx, f, f2)


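# Nearly every override in this file pivots on the standin mapping: the
# largefile itself lives in the working copy, while a small "standin" holding
# its hash is what Mercurial tracks, under '.hglf/'.  A self-contained sketch
# of the path round-trip; these helpers mirror lfutil.standin() and
# lfutil.splitstandin() only in spirit (the real ones also normalize
# platform path separators).

_SHORTNAME = b'.hglf'


def _standin(filename):
    """Map a largefile path to its tracked standin path."""
    return _SHORTNAME + b'/' + filename


def _splitstandin(filename):
    """Return the largefile path for a standin path, else None."""
    if filename.startswith(_SHORTNAME + b'/'):
        return filename[len(_SHORTNAME) + 1 :]
    return None


assert _standin(b'data/big.bin') == b'.hglf/data/big.bin'
assert _splitstandin(b'.hglf/data/big.bin') == b'data/big.bin'
assert _splitstandin(b'data/big.bin') is None

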
# The manifest merge handles conflicts on the manifest level. We want
# to handle changes in largefile-ness of files at this level too.
#
# The strategy is to run the original calculateupdates and then process
# the action list it outputs. There are two cases we need to deal with:
#
# 1. Normal file in p1, largefile in p2. Here the largefile is
#    detected via its standin file, which will enter the working copy
#    with a "get" action. It is not "merge" since the standin is all
#    Mercurial is concerned with at this level -- the link to the
#    existing normal file is not relevant here.
#
# 2. Largefile in p1, normal file in p2. Here we get a "merge" action
#    since the largefile will be present in the working copy and
#    different from the normal file in p2. Mercurial therefore
#    triggers a merge action.
#
# In both cases, we prompt the user and emit new actions to either
# remove the standin (if the normal file was kept) or to remove the
# normal file and get the standin (if the largefile was kept). The
# default prompt answer is to use the largefile version since it was
# presumably changed on purpose.
#
# Finally, the merge.applyupdates function will then take care of
# writing the files into the working copy and lfcommands.updatelfiles
# will update the largefiles.
@eh.wrapfunction(merge, b'calculateupdates')
def overridecalculateupdates(
    origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
):
    overwrite = force and not branchmerge
    mresult = origfn(
        repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
    )

    if overwrite:
        return mresult

    # Gather the largefiles involved in this merge, detected via their
    # standins.
    lfiles = set()
    for f in mresult.files():
        splitstandin = lfutil.splitstandin(f)
        if splitstandin is not None and splitstandin in p1:
            lfiles.add(splitstandin)
        elif lfutil.standin(f) in p1:
            lfiles.add(f)

    for lfile in sorted(lfiles):
        standin = lfutil.standin(lfile)
        (lm, largs, lmsg) = mresult.getfile(lfile, (None, None, None))
        (sm, sargs, smsg) = mresult.getfile(standin, (None, None, None))

        if sm in (ACTION_GET, ACTION_DELETED_CHANGED) and lm != ACTION_REMOVE:
            if sm == ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = sargs
                sargs = (p2[f2].flags(), False)
            # Case 1: normal file in the working copy, largefile in
            # the second parent
            usermsg = (
                _(
                    b'remote turned local normal file %s into a largefile\n'
                    b'use (l)argefile or keep (n)ormal file?'
                    b'$$ &Largefile $$ &Normal file'
                )
                % lfile
            )
            if repo.ui.promptchoice(usermsg, 0) == 0:  # pick remote largefile
                mresult.addfile(
                    lfile, ACTION_REMOVE, None, b'replaced by standin'
                )
                mresult.addfile(standin, ACTION_GET, sargs, b'replaces standin')
            else:  # keep local normal file
                mresult.addfile(lfile, ACTION_KEEP, None, b'replaces standin')
                if branchmerge:
                    mresult.addfile(
                        standin,
                        ACTION_KEEP,
                        None,
                        b'replaced by non-standin',
                    )
                else:
                    mresult.addfile(
                        standin,
                        ACTION_REMOVE,
                        None,
                        b'replaced by non-standin',
                    )
        if lm in (ACTION_GET, ACTION_DELETED_CHANGED) and sm != ACTION_REMOVE:
            if lm == ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = largs
                largs = (p2[f2].flags(), False)
            # Case 2: largefile in the working copy, normal file in
            # the second parent
            usermsg = (
                _(
                    b'remote turned local largefile %s into a normal file\n'
                    b'keep (l)argefile or use (n)ormal file?'
                    b'$$ &Largefile $$ &Normal file'
                )
                % lfile
            )
            if repo.ui.promptchoice(usermsg, 0) == 0:  # keep local largefile
                if branchmerge:
                    # largefile can be restored from standin safely
                    mresult.addfile(
                        lfile,
                        ACTION_KEEP,
                        None,
                        b'replaced by standin',
                    )
                    mresult.addfile(
                        standin, ACTION_KEEP, None, b'replaces standin'
                    )
                else:
                    # "lfile" should be marked as "removed" without
                    # removal of itself
                    mresult.addfile(
                        lfile,
                        MERGE_ACTION_LARGEFILE_MARK_REMOVED,
                        None,
                        b'forget non-standin largefile',
                    )

                    # linear-merge should treat this largefile as 're-added'
                    mresult.addfile(standin, ACTION_ADD, None, b'keep standin')
            else:  # pick remote normal file
                mresult.addfile(lfile, ACTION_GET, largs, b'replaces standin')
                mresult.addfile(
                    standin,
                    ACTION_REMOVE,
                    None,
                    b'replaced by non-standin',
                )

    return mresult


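# A condensed model of the rewriting above: for each largefile/standin pair,
# keep exactly one side and rewrite the other to a remove or keep action so
# both never materialize at once.  Everything here is an illustrative
# stand-in for the real mergeresult/ACTION_* machinery and promptchoice().

def _resolve_pair(actions, lfile, standin, keep_largefile):
    """Rewrite the action table in place for one largefile/standin pair."""
    if keep_largefile:
        actions[lfile] = 'remove'  # normal file gives way to the standin
        actions[standin] = 'get'
    else:
        actions[lfile] = 'keep'  # normal file wins
        actions[standin] = 'remove'


_actions = {'foo': 'keep', '.hglf/foo': 'get'}
_resolve_pair(_actions, 'foo', '.hglf/foo', keep_largefile=False)
assert _actions == {'foo': 'keep', '.hglf/foo': 'remove'}

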
@eh.wrapfunction(mergestatemod, b'recordupdates')
def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
    if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        with lfdirstate.parentchange():
            for lfile, args, msg in actions[
                MERGE_ACTION_LARGEFILE_MARK_REMOVED
            ]:
                # this should be executed before 'orig', to execute 'remove'
                # before all other actions
                repo.dirstate.update_file(
                    lfile, p1_tracked=True, wc_tracked=False
                )
                # make sure lfile doesn't get synclfdirstate'd as normal
                lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=True)
        lfdirstate.write(repo.currenttransaction())

    return orig(repo, actions, branchmerge, getfiledata)


# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
@eh.wrapfunction(filemerge, b'filemerge')
def overridefilemerge(
    origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
):
    if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
        return origfn(repo, wctx, mynode, orig, fcd, fco, fca, labels=labels)

    ahash = lfutil.readasstandin(fca).lower()
    dhash = lfutil.readasstandin(fcd).lower()
    ohash = lfutil.readasstandin(fco).lower()
    if (
        ohash != ahash
        and ohash != dhash
        and (
            dhash == ahash
            or repo.ui.promptchoice(
                _(
                    b'largefile %s has a merge conflict\nancestor was %s\n'
                    b'you can keep (l)ocal %s or take (o)ther %s.\n'
                    b'what do you want to do?'
                    b'$$ &Local $$ &Other'
                )
                % (lfutil.splitstandin(orig), ahash, dhash, ohash),
                0,
            )
            == 1
        )
    ):
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0, False


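# The shortcut above never reads largefile contents: comparing the three
# standin hashes is enough to detect "other side unchanged" and "both sides
# made the same change".  A pure-Python sketch of that decision table, with
# the prompt injected so the sketch stays testable (illustrative only):

def _choose_other(ahash, dhash, ohash, prompt):
    """Return True when the 'other' largefile should be taken."""
    if ohash == ahash or ohash == dhash:
        return False  # other side unchanged, or both sides already agree
    if dhash == ahash:
        return True  # only the other side changed: take it silently
    return prompt()  # genuine conflict: ask the user


assert not _choose_other('a', 'd', 'a', prompt=None)
assert _choose_other('a', 'a', 'o', prompt=None)
assert _choose_other('a', 'd', 'o', prompt=lambda: True)

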
@eh.wrapfunction(copiesmod, b'pathcopies')
def copiespathcopies(orig, ctx1, ctx2, match=None):
    copies = orig(ctx1, ctx2, match=match)
    updated = {}

    for k, v in pycompat.iteritems(copies):
        updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v

    return updated


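# copiespathcopies strips the standin prefix from both sides of the copy
# mapping so callers see largefile names, not '.hglf/...' paths.  A tiny
# self-contained model of that rewrite (the helper is illustrative):

def _split(f):
    return f[len(b'.hglf/') :] if f.startswith(b'.hglf/') else None


_copies = {b'.hglf/new.bin': b'.hglf/old.bin', b'plain.txt': b'other.txt'}
_updated = {(_split(k) or k): (_split(v) or v) for k, v in _copies.items()}
assert _updated == {b'new.bin': b'old.bin', b'plain.txt': b'other.txt'}

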
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile; in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
@eh.wrapfunction(cmdutil, b'copy')
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    manifest = repo[None].manifest()

    def normalfilesmatchfn(
        orig,
        ctx,
        pats=(),
        opts=None,
        globbed=False,
        default=b'relpath',
        badfn=None,
    ):
        if opts is None:
            opts = {}
        match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
        return composenormalfilematcher(match, manifest)

    with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn):
        try:
            result = orig(ui, repo, pats, opts, rename)
        except error.Abort as e:
            if e.message != _(b'no files to copy'):
                raise e
            else:
                nonormalfiles = True
            result = 0

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    def makestandin(relpath):
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return repo.wvfs.join(lfutil.standin(path))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))

    try:
        # When we call orig below it creates the standins but we don't add
        # them to the dir state until later so lock during that time.
        wlock = repo.wlock()

        manifest = repo[None].manifest()

        def overridematch(
            orig,
            ctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            if opts is None:
                opts = {}
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if matchmod.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, b''))
                else:
                    newpats.append(pat)
            match = orig(ctx, newpats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            lfile = lambda f: lfutil.standin(f) in manifest
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                lfile = lfutil.splitstandin(f)
                return (
                    lfile is not None
                    and (f in manifest)
                    and origmatchfn(lfile)
                    or None
                )

            m.matchfn = matchfn
            return m

        listpats = []
        for pat in pats:
            if matchmod.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        copiedfiles = []

        def overridecopyfile(orig, src, dest, *args, **kwargs):
            if lfutil.shortname in src and dest.startswith(
                repo.wjoin(lfutil.shortname)
            ):
                destlfile = dest.replace(lfutil.shortname, b'')
                if not opts[b'force'] and os.path.exists(destlfile):
                    raise IOError(
                        b'', _(b'destination largefile already exists')
                    )
            copiedfiles.append((src, dest))
            orig(src, dest, *args, **kwargs)

        with extensions.wrappedfunction(util, b'copyfile', overridecopyfile):
            with extensions.wrappedfunction(scmutil, b'match', overridematch):
                result += orig(ui, repo, listpats, opts, rename)

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for (src, dest) in copiedfiles:
            if lfutil.shortname in src and dest.startswith(
                repo.wjoin(lfutil.shortname)
            ):
                srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
                destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
                destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
                if not os.path.isdir(destlfiledir):
                    os.makedirs(destlfiledir)
                if rename:
                    os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    # The file is gone, but this deletes any empty parent
                    # directories as a side-effect.
                    repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
                    lfdirstate.set_untracked(srclfile)
                else:
                    util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    lfdirstate.set_tracked(destlfile)
        lfdirstate.write(repo.currenttransaction())
    except error.Abort as e:
        if e.message != _(b'no files to copy'):
            raise e
        else:
            nolfiles = True
    finally:
        wlock.release()

    if nolfiles and nonormalfiles:
        raise error.Abort(_(b'no files to copy'))

    return result


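# overridecopy round-trips command-line patterns through the standin
# directory: plain paths are mangled to '.hglf/...' so the copy machinery
# operates on standins, and overridematch() strips that prefix back off.
# A sketch of the rewrite step; the real code additionally consults
# matchmod.patkind() so 'glob:'-style patterns are left alone.

_LF_DIR = b'.hglf'


def _add_standin_prefix(pats):
    return [_LF_DIR + b'/' + p for p in pats]


def _strip_standin_prefix(pats):
    return [p.replace(_LF_DIR + b'/', b'') for p in pats]


_pats = [b'big1.bin', b'big2.bin']
assert _strip_standin_prefix(_add_standin_prefix(_pats)) == _pats

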
# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles.
@eh.wrapfunction(cmdutil, b'revert')
def overriderevert(orig, ui, repo, ctx, *pats, **opts):
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    with repo.wlock():
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write(repo.currenttransaction())
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
        for lfile in s.deleted:
            fstandin = lfutil.standin(lfile)
            if repo.wvfs.exists(fstandin):
                repo.wvfs.unlink(fstandin)

        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(
            orig,
            mctx,
            pats=(),
            opts=None,
            globbed=False,
            default=b'relpath',
            badfn=None,
        ):
            if opts is None:
                opts = {}
            match = orig(mctx, pats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)

            # revert supports recursing into subrepos, and though largefiles
            # currently doesn't work correctly in that case, this match is
            # called, so the lfdirstate above may not be the correct one for
            # this invocation of match.
            lfdirstate = lfutil.openlfdirstate(
                mctx.repo().ui, mctx.repo(), False
            )

            wctx = repo[None]
            matchfiles = []
            for f in m._files:
                standin = lfutil.standin(f)
                if standin in ctx or standin in mctx:
                    matchfiles.append(standin)
                elif standin in wctx or lfdirstate.get_entry(f).removed:
                    continue
                else:
                    matchfiles.append(f)
            m._files = matchfiles
            m._fileset = set(m._files)
            origmatchfn = m.matchfn

            def matchfn(f):
                lfile = lfutil.splitstandin(f)
                if lfile is not None:
                    return origmatchfn(lfile) and (f in ctx or f in mctx)
                return origmatchfn(f)

            m.matchfn = matchfn
            return m

        with extensions.wrappedfunction(scmutil, b'match', overridematch):
            orig(ui, repo, ctx, *pats, **opts)

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(
            ui, repo, filelist, printmessage=False, normallookup=True
        )


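# overriderevert brackets the original revert between two snapshots of
# standin state and refreshes only largefiles whose standins changed.  A
# sketch of that diff with snapshots modeled as {path: hash} dicts (the real
# getstandinsstate()/getlfilestoupdate() operate on lists of tuples):

def _changed_standins(old, new):
    """Return the sorted list of paths whose recorded hash differs."""
    return sorted(
        path
        for path in set(old) | set(new)
        if old.get(path) != new.get(path)
    )


_old = {b'a.bin': b'11', b'b.bin': b'22'}
_new = {b'a.bin': b'11', b'b.bin': b'33', b'c.bin': b'44'}
assert _changed_standins(_old, _new) == [b'b.bin', b'c.bin']

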
# after pulling changesets, we need to take some extra care to get
# largefiles updated remotely
@eh.wrapcommand(
    b'pull',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all pulled versions of largefiles (DEPRECATED)'),
        ),
        (
            b'',
            b'lfrev',
            [],
            _(b'download largefiles for these revisions'),
            _(b'REV'),
        ),
    ],
)
def overridepull(orig, ui, repo, source=None, **opts):
    revsprepull = len(repo)
    if not source:
        source = b'default'
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        lfrevs.append(b'pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull  # for pulled() revset expression
        try:
            for rev in logcmdutil.revrange(repo, lfrevs):
                ui.note(_(b'pulling largefiles for revision %d\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            del repo.firstpulled
        ui.status(_(b"%d largefiles cached\n") % numcached)
    return result


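# The pulled() revset needs only one integer of state: the repo length
# recorded just before the pull.  Every revision numbered at or above that
# baseline is "new", which is exactly what the predicate filters on.  A
# self-contained model (plain ints stand in for repo revisions):

def _pulled(subset, firstpulled):
    return [r for r in subset if r >= firstpulled]


_revsprepull = 5  # len(repo) before the pull
_allrevs = range(8)  # three new revisions arrived
assert _pulled(_allrevs, _revsprepull) == [5, 6, 7]

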
@eh.wrapcommand(
    b'push',
    opts=[
        (
            b'',
            b'lfrev',
            [],
            _(b'upload largefiles for these revisions'),
            _(b'REV'),
        )
    ],
)
def overridepush(orig, ui, repo, *args, **kwargs):
    """Override push command and store --lfrev parameters in opargs"""
    lfrevs = kwargs.pop('lfrev', None)
    if lfrevs:
        opargs = kwargs.setdefault('opargs', {})
        opargs[b'lfrevs'] = logcmdutil.revrange(repo, lfrevs)
    return orig(ui, repo, *args, **kwargs)


@eh.wrapfunction(exchange, b'pushoperation')
def exchangepushoperation(orig, *args, **kwargs):
    """Override pushoperation constructor and store lfrevs parameter"""
    lfrevs = kwargs.pop('lfrevs', None)
    pushop = orig(*args, **kwargs)
    pushop.lfrevs = lfrevs
    return pushop


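# overridepush and exchangepushoperation together thread an extension-only
# option from the command line down to the push operation object via the
# 'opargs' dict, without touching any core signatures.  A minimal model of
# that relay; the class and function names here are made up for the demo.

def _cli_layer(push, **kwargs):
    lfrevs = kwargs.pop('lfrev', None)
    if lfrevs:
        kwargs.setdefault('opargs', {})['lfrevs'] = lfrevs
    return push(**kwargs)


class _PushOp(object):
    def __init__(self, opargs=None):
        self.lfrevs = (opargs or {}).pop('lfrevs', None)


_op = _cli_layer(lambda **kw: _PushOp(**kw), lfrev=['pulled()'])
assert _op.lfrevs == ['pulled()']

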
@eh.revsetpredicate(b'pulled()')
def pulledrevsetsymbol(repo, subset, x):
    """Changesets that have just been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull --lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull --lfrev "head(pulled()) and not closed()"

    """

    try:
        firstpulled = repo.firstpulled
    except AttributeError:
        raise error.Abort(_(b"pulled() only available in --lfrev"))
    return smartset.baseset([r for r in subset if r >= firstpulled])


@eh.wrapcommand(
    b'clone',
    opts=[
        (
            b'',
            b'all-largefiles',
            None,
            _(b'download all versions of all largefiles'),
        )
    ],
)
def overrideclone(orig, ui, source, dest=None, **opts):
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(d):
        raise error.Abort(
            _(b'--all-largefiles is incompatible with non-local destination %s')
            % d
        )

    return orig(ui, source, dest, **opts)


@eh.wrapfunction(hg, b'clone')
def hgclone(orig, ui, opts, *args, **kwargs):
    result = orig(ui, opts, *args, **kwargs)

    if result is not None:
        sourcerepo, destrepo = result
        repo = destrepo.local()

        # When cloning to a remote repo (like through SSH), no repo is available
        # from the peer. Therefore the largefiles can't be downloaded and the
        # hgrc can't be updated.
        if not repo:
            return result

        # Caching is implicitly limited to 'rev' option, since the dest repo was
        # truncated at that point. The user may expect a download count with
        # this option, so attempt the download whether or not this is a
        # largefile repo.
        if opts.get(b'all_largefiles'):
            success, missing = lfcommands.downloadlfiles(ui, repo)

            if missing != 0:
                return None

    return result


@eh.wrapcommand(b'rebase', extension=b'rebase')
def overriderebasecmd(orig, ui, repo, **opts):
    if not util.safehasattr(repo, b'_largefilesenabled'):
        return orig(ui, repo, **opts)

    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        with ui.configoverride(
            {(b'rebase', b'experimental.inmemory'): False}, b"largefiles"
        ):
            return orig(ui, repo, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()


@eh.extsetup
def overriderebase(ui):
    try:
        rebase = extensions.find(b'rebase')
    except KeyError:
        pass
    else:

        def _dorebase(orig, *args, **kwargs):
            kwargs['inmemory'] = False
            return orig(*args, **kwargs)

        extensions.wrapfunction(rebase, b'_dorebase', _dorebase)


@eh.wrapcommand(b'archive')
def overridearchivecmd(orig, ui, repo, dest, **opts):
    with lfstatus(repo.unfiltered()):
        return orig(ui, repo.unfiltered(), dest, **opts)


@eh.wrapfunction(webcommands, b'archive')
def hgwebarchive(orig, web):
    with lfstatus(web.repo):
        return orig(web)


@eh.wrapfunction(archival, b'archive')
def overridearchive(
    orig,
    repo,
    dest,
    node,
    kind,
    decode=True,
    match=None,
    prefix=b'',
    mtime=None,
    subrepos=None,
):
    # For some reason setting repo.lfstatus in hgwebarchive only changes the
    # unfiltered repo's attr, so check that as well.
    if not repo.lfstatus and not repo.unfiltered().lfstatus:
        return orig(
            repo, dest, node, kind, decode, match, prefix, mtime, subrepos
        )

    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    if node is not None:
        lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise error.Abort(_(b"unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == b'files':
        if prefix:
            raise error.Abort(_(b'cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        if match and not match(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool(b"ui", b"archivemeta"):
        write(
            b'.hg_archival.txt',
            0o644,
            False,
            lambda: archival.buildmetadata(ctx),
        )

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        lfile = lfutil.splitstandin(f)
        if lfile is not None:
            if node is not None:
                path = lfutil.findfile(repo, getdata().strip())

                if path is None:
                    raise error.Abort(
                        _(
                            b'largefile %s not found in repo store or system cache'
                        )
                        % lfile
                    )
            else:
                path = lfile

            f = lfile

            getdata = lambda: util.readfile(path)
        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + b'/'

            # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
            # infer and possibly set lfstatus in hgsubrepoarchive.  That would
            # allow only hgsubrepos to set this, instead of the current scheme
            # where the parent sets this for the child.
            with (
                util.safehasattr(sub, '_repo')
                and lfstatus(sub._repo)
                or util.nullcontextmanager()
            ):
                sub.archive(archiver, subprefix, submatch)

    archiver.done()


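# overridearchive funnels every member through a single write() callback
# that applies the match filter and optional decoding before handing data to
# the archiver, after standins have been swapped for real largefile contents.
# A stripped-down model of that funnel (the "archiver" is a plain dict and
# all names are illustrative):

def _make_writer(archive, match=None, decode=None):
    def write(name, getdata):
        if match and not match(name):
            return  # filtered out, exactly like the real write()
        data = getdata()
        if decode:
            data = decode(name, data)
        archive[name] = data

    return write


_archive = {}
_write = _make_writer(_archive, match=lambda n: n.endswith(b'.txt'))
_write(b'keep.txt', lambda: b'payload')
_write(b'skip.bin', lambda: b'ignored')
assert _archive == {b'keep.txt': b'payload'}

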
1262 @eh.wrapfunction(subrepo.hgsubrepo, b'archive')
1271 @eh.wrapfunction(subrepo.hgsubrepo, b'archive')
1263 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1272 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1264 lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
1273 lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
1265 if not lfenabled or not repo._repo.lfstatus:
1274 if not lfenabled or not repo._repo.lfstatus:
1266 return orig(repo, archiver, prefix, match, decode)
1275 return orig(repo, archiver, prefix, match, decode)
1267
1276
1268 repo._get(repo._state + (b'hg',))
1277 repo._get(repo._state + (b'hg',))
1269 rev = repo._state[1]
1278 rev = repo._state[1]
1270 ctx = repo._repo[rev]
1279 ctx = repo._repo[rev]
1271
1280
1272 if ctx.node() is not None:
1281 if ctx.node() is not None:
1273 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1282 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1274
1283
1275 def write(name, mode, islink, getdata):
1284 def write(name, mode, islink, getdata):
1276 # At this point, the standin has been replaced with the largefile name,
1285 # At this point, the standin has been replaced with the largefile name,
1277 # so the normal matcher works here without the lfutil variants.
1286 # so the normal matcher works here without the lfutil variants.
1278 if match and not match(name):
1287 if match and not match(name):
1279 return
1288 return
1280 data = getdata()
1289 data = getdata()
1281 if decode:
1290 if decode:
1282 data = repo._repo.wwritedata(name, data)
1291 data = repo._repo.wwritedata(name, data)
1283
1292
1284 archiver.addfile(prefix + name, mode, islink, data)
1293 archiver.addfile(prefix + name, mode, islink, data)
1285
1294
1286 for f in ctx:
1295 for f in ctx:
1287 ff = ctx.flags(f)
1296 ff = ctx.flags(f)
1288 getdata = ctx[f].data
1297 getdata = ctx[f].data
1289 lfile = lfutil.splitstandin(f)
1298 lfile = lfutil.splitstandin(f)
1290 if lfile is not None:
1299 if lfile is not None:
1291 if ctx.node() is not None:
1300 if ctx.node() is not None:
1292 path = lfutil.findfile(repo._repo, getdata().strip())
1301 path = lfutil.findfile(repo._repo, getdata().strip())
1293
1302
1294 if path is None:
1303 if path is None:
1295 raise error.Abort(
1304 raise error.Abort(
1296 _(
1305 _(
1297 b'largefile %s not found in repo store or system cache'
1306 b'largefile %s not found in repo store or system cache'
1298 )
1307 )
1299 % lfile
1308 % lfile
1300 )
1309 )
1301 else:
1310 else:
1302 path = lfile
1311 path = lfile
1303
1312
1304 f = lfile
1313 f = lfile
1305
1314
1306 getdata = lambda: util.readfile(os.path.join(prefix, path))
1315 getdata = lambda: util.readfile(os.path.join(prefix, path))
1307
1316
1308 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
1317 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
1309
1318
1310 for subpath in sorted(ctx.substate):
1319 for subpath in sorted(ctx.substate):
1311 sub = ctx.workingsub(subpath)
1320 sub = ctx.workingsub(subpath)
1312 submatch = matchmod.subdirmatcher(subpath, match)
1321 submatch = matchmod.subdirmatcher(subpath, match)
1313 subprefix = prefix + subpath + b'/'
1322 subprefix = prefix + subpath + b'/'
1314 # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
1323 # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
1315 # infer and possibly set lfstatus at the top of this function. That
1324 # infer and possibly set lfstatus at the top of this function. That
1316 # would allow only hgsubrepos to set this, instead of the current scheme
1325 # would allow only hgsubrepos to set this, instead of the current scheme
1317 # where the parent sets this for the child.
1326 # where the parent sets this for the child.
1318 with (
1327 with (
1319 util.safehasattr(sub, '_repo')
1328 util.safehasattr(sub, '_repo')
1320 and lfstatus(sub._repo)
1329 and lfstatus(sub._repo)
1321 or util.nullcontextmanager()
1330 or util.nullcontextmanager()
1322 ):
1331 ):
1323 sub.archive(archiver, subprefix, submatch, decode)
1332 sub.archive(archiver, subprefix, submatch, decode)
1324
1333
1325
1334
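# The `cond and cm or nullcontextmanager()` expression above picks a
# context manager at runtime. A self-contained sketch of the same idea
# in a more conventional spelling (contextlib.nullcontext plays the
# role of util.nullcontextmanager; `noisy` stands in for
# lfstatus(sub._repo)):

import contextlib

@contextlib.contextmanager
def noisy():
    print('lfstatus on')
    try:
        yield
    finally:
        print('lfstatus off')

def archive_one(sub):
    cm = noisy() if hasattr(sub, '_repo') else contextlib.nullcontext()
    with cm:
        pass  # plays the role of sub.archive(archiver, subprefix, submatch)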
1326 # If a largefile is modified, the change is not reflected in its
1335 # If a largefile is modified, the change is not reflected in its
1327 # standin until a commit. cmdutil.bailifchanged() raises an exception
1336 # standin until a commit. cmdutil.bailifchanged() raises an exception
1328 # if the repo has uncommitted changes. Wrap it to also check if
1337 # if the repo has uncommitted changes. Wrap it to also check if
1329 # largefiles were changed. This is used by bisect, backout and fetch.
1338 # largefiles were changed. This is used by bisect, backout and fetch.
1330 @eh.wrapfunction(cmdutil, b'bailifchanged')
1339 @eh.wrapfunction(cmdutil, b'bailifchanged')
1331 def overridebailifchanged(orig, repo, *args, **kwargs):
1340 def overridebailifchanged(orig, repo, *args, **kwargs):
1332 orig(repo, *args, **kwargs)
1341 orig(repo, *args, **kwargs)
1333 with lfstatus(repo):
1342 with lfstatus(repo):
1334 s = repo.status()
1343 s = repo.status()
1335 if s.modified or s.added or s.removed or s.deleted:
1344 if s.modified or s.added or s.removed or s.deleted:
1336 raise error.Abort(_(b'uncommitted changes'))
1345 raise error.Abort(_(b'uncommitted changes'))
1337
1346
1338
1347
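# lfstatus(repo) is used as a context manager throughout these
# overrides. A minimal sketch of its assumed shape (the real one lives
# in lfutil.py): flip repo.lfstatus on, and restore the previous value
# on exit, even when the body raises.

from contextlib import contextmanager

@contextmanager
def lfstatus_sketch(repo, value=True):
    oldvalue = getattr(repo, 'lfstatus', False)
    repo.lfstatus = value
    try:
        yield
    finally:
        repo.lfstatus = oldvalue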
1339 @eh.wrapfunction(cmdutil, b'postcommitstatus')
1348 @eh.wrapfunction(cmdutil, b'postcommitstatus')
1340 def postcommitstatus(orig, repo, *args, **kwargs):
1349 def postcommitstatus(orig, repo, *args, **kwargs):
1341 with lfstatus(repo):
1350 with lfstatus(repo):
1342 return orig(repo, *args, **kwargs)
1351 return orig(repo, *args, **kwargs)
1343
1352
1344
1353
1345 @eh.wrapfunction(cmdutil, b'forget')
1354 @eh.wrapfunction(cmdutil, b'forget')
1346 def cmdutilforget(
1355 def cmdutilforget(
1347 orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
1356 orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
1348 ):
1357 ):
1349 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1358 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1350 bad, forgot = orig(
1359 bad, forgot = orig(
1351 ui,
1360 ui,
1352 repo,
1361 repo,
1353 normalmatcher,
1362 normalmatcher,
1354 prefix,
1363 prefix,
1355 uipathfn,
1364 uipathfn,
1356 explicitonly,
1365 explicitonly,
1357 dryrun,
1366 dryrun,
1358 interactive,
1367 interactive,
1359 )
1368 )
1360 m = composelargefilematcher(match, repo[None].manifest())
1369 m = composelargefilematcher(match, repo[None].manifest())
1361
1370
1362 with lfstatus(repo):
1371 with lfstatus(repo):
1363 s = repo.status(match=m, clean=True)
1372 s = repo.status(match=m, clean=True)
1364 manifest = repo[None].manifest()
1373 manifest = repo[None].manifest()
1365 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1374 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1366 forget = [f for f in forget if lfutil.standin(f) in manifest]
1375 forget = [f for f in forget if lfutil.standin(f) in manifest]
1367
1376
1368 for f in forget:
1377 for f in forget:
1369 fstandin = lfutil.standin(f)
1378 fstandin = lfutil.standin(f)
1370 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1379 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1371 ui.warn(
1380 ui.warn(
1372 _(b'not removing %s: file is already untracked\n') % uipathfn(f)
1381 _(b'not removing %s: file is already untracked\n') % uipathfn(f)
1373 )
1382 )
1374 bad.append(f)
1383 bad.append(f)
1375
1384
1376 for f in forget:
1385 for f in forget:
1377 if ui.verbose or not m.exact(f):
1386 if ui.verbose or not m.exact(f):
1378 ui.status(_(b'removing %s\n') % uipathfn(f))
1387 ui.status(_(b'removing %s\n') % uipathfn(f))
1379
1388
1380 # Need to lock because standin files are deleted then removed from the
1389 # Need to lock because standin files are deleted then removed from the
1381 # repository and we could race in-between.
1390 # repository and we could race in-between.
1382 with repo.wlock():
1391 with repo.wlock():
1383 lfdirstate = lfutil.openlfdirstate(ui, repo)
1392 lfdirstate = lfutil.openlfdirstate(ui, repo)
1384 for f in forget:
1393 for f in forget:
1385 lfdirstate.set_untracked(f)
1394 lfdirstate.set_untracked(f)
1386 lfdirstate.write(repo.currenttransaction())
1395 lfdirstate.write(repo.currenttransaction())
1387 standins = [lfutil.standin(f) for f in forget]
1396 standins = [lfutil.standin(f) for f in forget]
1388 for f in standins:
1397 for f in standins:
1389 repo.wvfs.unlinkpath(f, ignoremissing=True)
1398 repo.wvfs.unlinkpath(f, ignoremissing=True)
1390 rejected = repo[None].forget(standins)
1399 rejected = repo[None].forget(standins)
1391
1400
1392 bad.extend(f for f in rejected if f in m.files())
1401 bad.extend(f for f in rejected if f in m.files())
1393 forgot.extend(f for f in forget if f not in rejected)
1402 forgot.extend(f for f in forget if f not in rejected)
1394 return bad, forgot
1403 return bad, forgot
1395
1404
1396
1405
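# composelargefilematcher/composenormalfilematcher (used above) split
# one user matcher into a largefile half and a normal-file half, keyed
# on whether a file's standin is in the manifest. A hypothetical
# simplification of the largefile half; the `standin` parameter is the
# name-mapping function (see lfutil.standin):

import copy

def composelargefilematcher_sketch(match, manifest, standin):
    m = copy.copy(match)
    islfile = lambda f: standin(f) in manifest
    m._files = [f for f in m._files if islfile(f)]
    origmatchfn = m.matchfn
    m.matchfn = lambda f: islfile(f) and origmatchfn(f)
    return m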
1397 def _getoutgoings(repo, other, missing, addfunc):
1406 def _getoutgoings(repo, other, missing, addfunc):
1398 """get pairs of filename and largefile hash in outgoing revisions
1407 """get pairs of filename and largefile hash in outgoing revisions
1399 in 'missing'.
1408 in 'missing'.
1400
1409
1401 largefiles already existing in the 'other' repository are ignored.
1410 largefiles already existing in the 'other' repository are ignored.
1402
1411
1403 'addfunc' is invoked with each unique pair of filename and
1412 'addfunc' is invoked with each unique pair of filename and
1404 largefile hash value.
1413 largefile hash value.
1405 """
1414 """
1406 knowns = set()
1415 knowns = set()
1407 lfhashes = set()
1416 lfhashes = set()
1408
1417
1409 def dedup(fn, lfhash):
1418 def dedup(fn, lfhash):
1410 k = (fn, lfhash)
1419 k = (fn, lfhash)
1411 if k not in knowns:
1420 if k not in knowns:
1412 knowns.add(k)
1421 knowns.add(k)
1413 lfhashes.add(lfhash)
1422 lfhashes.add(lfhash)
1414
1423
1415 lfutil.getlfilestoupload(repo, missing, dedup)
1424 lfutil.getlfilestoupload(repo, missing, dedup)
1416 if lfhashes:
1425 if lfhashes:
1417 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1426 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1418 for fn, lfhash in knowns:
1427 for fn, lfhash in knowns:
1419 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1428 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1420 addfunc(fn, lfhash)
1429 addfunc(fn, lfhash)
1421
1430
1422
1431
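# Example (sketch) of driving _getoutgoings: collect the filename/hash
# pairs that still need uploading to `other`, grouped per file.

def collect_pending_uploads(repo, other, missing):
    pending = {}  # filename -> list of largefile hashes
    def record(fn, lfhash):
        pending.setdefault(fn, []).append(lfhash)
    _getoutgoings(repo, other, missing, record)
    return pending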
1423 def outgoinghook(ui, repo, other, opts, missing):
1432 def outgoinghook(ui, repo, other, opts, missing):
1424 if opts.pop(b'large', None):
1433 if opts.pop(b'large', None):
1425 lfhashes = set()
1434 lfhashes = set()
1426 if ui.debugflag:
1435 if ui.debugflag:
1427 toupload = {}
1436 toupload = {}
1428
1437
1429 def addfunc(fn, lfhash):
1438 def addfunc(fn, lfhash):
1430 if fn not in toupload:
1439 if fn not in toupload:
1431 toupload[fn] = []
1440 toupload[fn] = []
1432 toupload[fn].append(lfhash)
1441 toupload[fn].append(lfhash)
1433 lfhashes.add(lfhash)
1442 lfhashes.add(lfhash)
1434
1443
1435 def showhashes(fn):
1444 def showhashes(fn):
1436 for lfhash in sorted(toupload[fn]):
1445 for lfhash in sorted(toupload[fn]):
1437 ui.debug(b' %s\n' % lfhash)
1446 ui.debug(b' %s\n' % lfhash)
1438
1447
1439 else:
1448 else:
1440 toupload = set()
1449 toupload = set()
1441
1450
1442 def addfunc(fn, lfhash):
1451 def addfunc(fn, lfhash):
1443 toupload.add(fn)
1452 toupload.add(fn)
1444 lfhashes.add(lfhash)
1453 lfhashes.add(lfhash)
1445
1454
1446 def showhashes(fn):
1455 def showhashes(fn):
1447 pass
1456 pass
1448
1457
1449 _getoutgoings(repo, other, missing, addfunc)
1458 _getoutgoings(repo, other, missing, addfunc)
1450
1459
1451 if not toupload:
1460 if not toupload:
1452 ui.status(_(b'largefiles: no files to upload\n'))
1461 ui.status(_(b'largefiles: no files to upload\n'))
1453 else:
1462 else:
1454 ui.status(
1463 ui.status(
1455 _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
1464 _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
1456 )
1465 )
1457 for file in sorted(toupload):
1466 for file in sorted(toupload):
1458 ui.status(lfutil.splitstandin(file) + b'\n')
1467 ui.status(lfutil.splitstandin(file) + b'\n')
1459 showhashes(file)
1468 showhashes(file)
1460 ui.status(b'\n')
1469 ui.status(b'\n')
1461
1470
1462
1471
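# Usage note: `hg outgoing --large` prints the largefiles that a push
# would have to upload; with --debug, each entity hash is also listed
# under its filename (see showhashes above). Without --large the hook
# does nothing.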
1463 @eh.wrapcommand(
1472 @eh.wrapcommand(
1464 b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1473 b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1465 )
1474 )
1466 def _outgoingcmd(orig, *args, **kwargs):
1475 def _outgoingcmd(orig, *args, **kwargs):
1467 # Nothing to do here other than add the extra help option; the hook above
1476 # Nothing to do here other than add the extra help option; the hook above
1468 # processes it.
1477 # processes it.
1469 return orig(*args, **kwargs)
1478 return orig(*args, **kwargs)
1470
1479
1471
1480
1472 def summaryremotehook(ui, repo, opts, changes):
1481 def summaryremotehook(ui, repo, opts, changes):
1473 largeopt = opts.get(b'large', False)
1482 largeopt = opts.get(b'large', False)
1474 if changes is None:
1483 if changes is None:
1475 if largeopt:
1484 if largeopt:
1476 return (False, True) # only outgoing check is needed
1485 return (False, True) # only outgoing check is needed
1477 else:
1486 else:
1478 return (False, False)
1487 return (False, False)
1479 elif largeopt:
1488 elif largeopt:
1480 url, branch, peer, outgoing = changes[1]
1489 url, branch, peer, outgoing = changes[1]
1481 if peer is None:
1490 if peer is None:
1482 # i18n: column positioning for "hg summary"
1491 # i18n: column positioning for "hg summary"
1483 ui.status(_(b'largefiles: (no remote repo)\n'))
1492 ui.status(_(b'largefiles: (no remote repo)\n'))
1484 return
1493 return
1485
1494
1486 toupload = set()
1495 toupload = set()
1487 lfhashes = set()
1496 lfhashes = set()
1488
1497
1489 def addfunc(fn, lfhash):
1498 def addfunc(fn, lfhash):
1490 toupload.add(fn)
1499 toupload.add(fn)
1491 lfhashes.add(lfhash)
1500 lfhashes.add(lfhash)
1492
1501
1493 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1502 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1494
1503
1495 if not toupload:
1504 if not toupload:
1496 # i18n: column positioning for "hg summary"
1505 # i18n: column positioning for "hg summary"
1497 ui.status(_(b'largefiles: (no files to upload)\n'))
1506 ui.status(_(b'largefiles: (no files to upload)\n'))
1498 else:
1507 else:
1499 # i18n: column positioning for "hg summary"
1508 # i18n: column positioning for "hg summary"
1500 ui.status(
1509 ui.status(
1501 _(b'largefiles: %d entities for %d files to upload\n')
1510 _(b'largefiles: %d entities for %d files to upload\n')
1502 % (len(lfhashes), len(toupload))
1511 % (len(lfhashes), len(toupload))
1503 )
1512 )
1504
1513
1505
1514
1506 @eh.wrapcommand(
1515 @eh.wrapcommand(
1507 b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1516 b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1508 )
1517 )
1509 def overridesummary(orig, ui, repo, *pats, **opts):
1518 def overridesummary(orig, ui, repo, *pats, **opts):
1510 with lfstatus(repo):
1519 with lfstatus(repo):
1511 orig(ui, repo, *pats, **opts)
1520 orig(ui, repo, *pats, **opts)
1512
1521
1513
1522
1514 @eh.wrapfunction(scmutil, b'addremove')
1523 @eh.wrapfunction(scmutil, b'addremove')
1515 def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
1524 def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
1516 if opts is None:
1525 if opts is None:
1517 opts = {}
1526 opts = {}
1518 if not lfutil.islfilesrepo(repo):
1527 if not lfutil.islfilesrepo(repo):
1519 return orig(repo, matcher, prefix, uipathfn, opts)
1528 return orig(repo, matcher, prefix, uipathfn, opts)
1520 # Get the list of missing largefiles so we can remove them
1529 # Get the list of missing largefiles so we can remove them
1521 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1530 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1522 unsure, s = lfdirstate.status(
1531 unsure, s, mtime_boundary = lfdirstate.status(
1523 matchmod.always(),
1532 matchmod.always(),
1524 subrepos=[],
1533 subrepos=[],
1525 ignored=False,
1534 ignored=False,
1526 clean=False,
1535 clean=False,
1527 unknown=False,
1536 unknown=False,
1528 )
1537 )
1529
1538
1530 # Call into the normal remove code, but leave the removal of the standin to
1539 # Call into the normal remove code, but leave the removal of the standin to
1531 # the original addremove. Monkey patching here makes sure
1540 # the original addremove. Monkey patching here makes sure
1532 # we don't remove the standin in the largefiles code, preventing a very
1541 # we don't remove the standin in the largefiles code, preventing a very
1533 # confused state later.
1542 # confused state later.
1534 if s.deleted:
1543 if s.deleted:
1535 m = copy.copy(matcher)
1544 m = copy.copy(matcher)
1536
1545
1537 # The m._files and m._map attributes are not changed to the deleted list
1546 # The m._files and m._map attributes are not changed to the deleted list
1538 # because that affects the m.exact() test, which in turn governs whether
1547 # because that affects the m.exact() test, which in turn governs whether
1539 # or not the file name is printed, and how. Simply limit the original
1548 # or not the file name is printed, and how. Simply limit the original
1540 # matches to those in the deleted status list.
1549 # matches to those in the deleted status list.
1541 matchfn = m.matchfn
1550 matchfn = m.matchfn
1542 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1551 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1543
1552
1544 removelargefiles(
1553 removelargefiles(
1545 repo.ui,
1554 repo.ui,
1546 repo,
1555 repo,
1547 True,
1556 True,
1548 m,
1557 m,
1549 uipathfn,
1558 uipathfn,
1550 opts.get(b'dry_run'),
1559 opts.get(b'dry_run'),
1551 **pycompat.strkwargs(opts)
1560 **pycompat.strkwargs(opts)
1552 )
1561 )
1553 # Call into the normal add code, and any files that *should* be added as
1562 # Call into the normal add code, and any files that *should* be added as
1554 # largefiles will be
1563 # largefiles will be
1555 added, bad = addlargefiles(
1564 added, bad = addlargefiles(
1556 repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
1565 repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
1557 )
1566 )
1558 # Now that we've handled largefiles, hand off to the original addremove
1567 # Now that we've handled largefiles, hand off to the original addremove
1559 # function to take care of the rest. Make sure it doesn't do anything with
1568 # function to take care of the rest. Make sure it doesn't do anything with
1560 # largefiles by passing a matcher that will ignore them.
1569 # largefiles by passing a matcher that will ignore them.
1561 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1570 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1562 return orig(repo, matcher, prefix, uipathfn, opts)
1571 return orig(repo, matcher, prefix, uipathfn, opts)
1563
1572
1564
1573
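# Note on the lfdirstate.status() call above: in this series the
# dirstate status API returns a third element,
#
#     unsure, s, mtime_boundary = lfdirstate.status(...)
#
# where mtime_boundary (assumed semantics; see
# mercurial.dirstateutils.timestamp) bounds which mtimes are recent
# enough to be racy. Callers like this one that never cache mtimes can
# simply ignore it; the status() override in reposetup.py below uses it
# to decide whether set_clean() may record a (mode, size, mtime) triple.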
1565 # Calling purge with --all will cause the largefiles to be deleted.
1574 # Calling purge with --all will cause the largefiles to be deleted.
1566 # Override repo.status to prevent this from happening.
1575 # Override repo.status to prevent this from happening.
1567 @eh.wrapcommand(b'purge')
1576 @eh.wrapcommand(b'purge')
1568 def overridepurge(orig, ui, repo, *dirs, **opts):
1577 def overridepurge(orig, ui, repo, *dirs, **opts):
1569 # XXX Monkey patching a repoview will not work. The assigned attribute will
1578 # XXX Monkey patching a repoview will not work. The assigned attribute will
1570 # be set on the unfiltered repo, but we will only lookup attributes in the
1579 # be set on the unfiltered repo, but we will only lookup attributes in the
1571 # unfiltered repo if the lookup in the repoview object itself fails. As the
1580 # unfiltered repo if the lookup in the repoview object itself fails. As the
1572 # monkey patched method exists on the repoview class the lookup will not
1581 # monkey patched method exists on the repoview class the lookup will not
1573 # fail. As a result, the original version will shadow the monkey patched
1582 # fail. As a result, the original version will shadow the monkey patched
1574 # one, defeating the monkey patch.
1583 # one, defeating the monkey patch.
1575 #
1584 #
1585 # As a workaround we use an unfiltered repo here. We should do something
1594 # As a workaround we use an unfiltered repo here. We should do something
1577 # cleaner instead.
1586 # cleaner instead.
1578 repo = repo.unfiltered()
1587 repo = repo.unfiltered()
1579 oldstatus = repo.status
1588 oldstatus = repo.status
1580
1589
1581 def overridestatus(
1590 def overridestatus(
1582 node1=b'.',
1591 node1=b'.',
1583 node2=None,
1592 node2=None,
1584 match=None,
1593 match=None,
1585 ignored=False,
1594 ignored=False,
1586 clean=False,
1595 clean=False,
1587 unknown=False,
1596 unknown=False,
1588 listsubrepos=False,
1597 listsubrepos=False,
1589 ):
1598 ):
1590 r = oldstatus(
1599 r = oldstatus(
1591 node1, node2, match, ignored, clean, unknown, listsubrepos
1600 node1, node2, match, ignored, clean, unknown, listsubrepos
1592 )
1601 )
1593 lfdirstate = lfutil.openlfdirstate(ui, repo)
1602 lfdirstate = lfutil.openlfdirstate(ui, repo)
1594 unknown = [
1603 unknown = [
1595 f for f in r.unknown if not lfdirstate.get_entry(f).any_tracked
1604 f for f in r.unknown if not lfdirstate.get_entry(f).any_tracked
1596 ]
1605 ]
1597 ignored = [
1606 ignored = [
1598 f for f in r.ignored if not lfdirstate.get_entry(f).any_tracked
1607 f for f in r.ignored if not lfdirstate.get_entry(f).any_tracked
1599 ]
1608 ]
1600 return scmutil.status(
1609 return scmutil.status(
1601 r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
1610 r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
1602 )
1611 )
1603
1612
1604 repo.status = overridestatus
1613 repo.status = overridestatus
1605 orig(ui, repo, *dirs, **opts)
1614 orig(ui, repo, *dirs, **opts)
1606 repo.status = oldstatus
1615 repo.status = oldstatus
1607
1616
1608
1617
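# A slightly more defensive variant of the status swap above (sketch):
# restore the original method even if the wrapped purge raises, so a
# failure cannot leave the repo object with the filtered status method.
#
#     repo.status = overridestatus
#     try:
#         orig(ui, repo, *dirs, **opts)
#     finally:
#         repo.status = oldstatus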
1609 @eh.wrapcommand(b'rollback')
1618 @eh.wrapcommand(b'rollback')
1610 def overriderollback(orig, ui, repo, **opts):
1619 def overriderollback(orig, ui, repo, **opts):
1611 with repo.wlock():
1620 with repo.wlock():
1612 before = repo.dirstate.parents()
1621 before = repo.dirstate.parents()
1613 orphans = {
1622 orphans = {
1614 f
1623 f
1615 for f in repo.dirstate
1624 for f in repo.dirstate
1616 if lfutil.isstandin(f) and not repo.dirstate.get_entry(f).removed
1625 if lfutil.isstandin(f) and not repo.dirstate.get_entry(f).removed
1617 }
1626 }
1618 result = orig(ui, repo, **opts)
1627 result = orig(ui, repo, **opts)
1619 after = repo.dirstate.parents()
1628 after = repo.dirstate.parents()
1620 if before == after:
1629 if before == after:
1621 return result # no need to restore standins
1630 return result # no need to restore standins
1622
1631
1623 pctx = repo[b'.']
1632 pctx = repo[b'.']
1624 for f in repo.dirstate:
1633 for f in repo.dirstate:
1625 if lfutil.isstandin(f):
1634 if lfutil.isstandin(f):
1626 orphans.discard(f)
1635 orphans.discard(f)
1627 if repo.dirstate.get_entry(f).removed:
1636 if repo.dirstate.get_entry(f).removed:
1628 repo.wvfs.unlinkpath(f, ignoremissing=True)
1637 repo.wvfs.unlinkpath(f, ignoremissing=True)
1629 elif f in pctx:
1638 elif f in pctx:
1630 fctx = pctx[f]
1639 fctx = pctx[f]
1631 repo.wwrite(f, fctx.data(), fctx.flags())
1640 repo.wwrite(f, fctx.data(), fctx.flags())
1632 else:
1641 else:
1633 # content of standin is not so important in 'a',
1642 # content of standin is not so important in 'a',
1634 # 'm' or 'n' (coming from the 2nd parent) cases
1643 # 'm' or 'n' (coming from the 2nd parent) cases
1635 lfutil.writestandin(repo, f, b'', False)
1644 lfutil.writestandin(repo, f, b'', False)
1636 for standin in orphans:
1645 for standin in orphans:
1637 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1646 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1638
1647
1639 return result
1648 return result
1640
1649
1641
1650
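# Sketch of the standin write used above (assumed format: a standin
# file holds the 40-hex sha1 of its largefile plus a trailing newline,
# and mirrors the largefile's executable bit):

def writestandin_sketch(repo, standin, hash, executable):
    repo.wwrite(standin, hash + b'\n', executable and b'x' or b'')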
1642 @eh.wrapcommand(b'transplant', extension=b'transplant')
1651 @eh.wrapcommand(b'transplant', extension=b'transplant')
1643 def overridetransplant(orig, ui, repo, *revs, **opts):
1652 def overridetransplant(orig, ui, repo, *revs, **opts):
1644 resuming = opts.get('continue')
1653 resuming = opts.get('continue')
1645 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1654 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1646 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1655 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1647 try:
1656 try:
1648 result = orig(ui, repo, *revs, **opts)
1657 result = orig(ui, repo, *revs, **opts)
1649 finally:
1658 finally:
1650 repo._lfstatuswriters.pop()
1659 repo._lfstatuswriters.pop()
1651 repo._lfcommithooks.pop()
1660 repo._lfcommithooks.pop()
1652 return result
1661 return result
1653
1662
1654
1663
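# The push/try/finally/pop pattern above keeps the hook stacks balanced
# even when transplant fails. A reusable sketch of the same discipline:

import contextlib

@contextlib.contextmanager
def pushed(stack, item):
    stack.append(item)
    try:
        yield
    finally:
        stack.pop()

# hypothetical usage:
#   with pushed(repo._lfcommithooks, hook), \
#        pushed(repo._lfstatuswriters, lambda *msg, **opts: None):
#       result = orig(ui, repo, *revs, **opts)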
1655 @eh.wrapcommand(b'cat')
1664 @eh.wrapcommand(b'cat')
1656 def overridecat(orig, ui, repo, file1, *pats, **opts):
1665 def overridecat(orig, ui, repo, file1, *pats, **opts):
1657 opts = pycompat.byteskwargs(opts)
1666 opts = pycompat.byteskwargs(opts)
1658 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'))
1667 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'))
1659 err = 1
1668 err = 1
1660 notbad = set()
1669 notbad = set()
1661 m = scmutil.match(ctx, (file1,) + pats, opts)
1670 m = scmutil.match(ctx, (file1,) + pats, opts)
1662 origmatchfn = m.matchfn
1671 origmatchfn = m.matchfn
1663
1672
1664 def lfmatchfn(f):
1673 def lfmatchfn(f):
1665 if origmatchfn(f):
1674 if origmatchfn(f):
1666 return True
1675 return True
1667 lf = lfutil.splitstandin(f)
1676 lf = lfutil.splitstandin(f)
1668 if lf is None:
1677 if lf is None:
1669 return False
1678 return False
1670 notbad.add(lf)
1679 notbad.add(lf)
1671 return origmatchfn(lf)
1680 return origmatchfn(lf)
1672
1681
1673 m.matchfn = lfmatchfn
1682 m.matchfn = lfmatchfn
1674 origbadfn = m.bad
1683 origbadfn = m.bad
1675
1684
1676 def lfbadfn(f, msg):
1685 def lfbadfn(f, msg):
1677 if f not in notbad:
1686 if f not in notbad:
1678 origbadfn(f, msg)
1687 origbadfn(f, msg)
1679
1688
1680 m.bad = lfbadfn
1689 m.bad = lfbadfn
1681
1690
1682 origvisitdirfn = m.visitdir
1691 origvisitdirfn = m.visitdir
1683
1692
1684 def lfvisitdirfn(dir):
1693 def lfvisitdirfn(dir):
1685 if dir == lfutil.shortname:
1694 if dir == lfutil.shortname:
1686 return True
1695 return True
1687 ret = origvisitdirfn(dir)
1696 ret = origvisitdirfn(dir)
1688 if ret:
1697 if ret:
1689 return ret
1698 return ret
1690 lf = lfutil.splitstandin(dir)
1699 lf = lfutil.splitstandin(dir)
1691 if lf is None:
1700 if lf is None:
1692 return False
1701 return False
1693 return origvisitdirfn(lf)
1702 return origvisitdirfn(lf)
1694
1703
1695 m.visitdir = lfvisitdirfn
1704 m.visitdir = lfvisitdirfn
1696
1705
1697 for f in ctx.walk(m):
1706 for f in ctx.walk(m):
1698 with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp:
1707 with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp:
1699 lf = lfutil.splitstandin(f)
1708 lf = lfutil.splitstandin(f)
1700 if lf is None or origmatchfn(f):
1709 if lf is None or origmatchfn(f):
1701 # duplicating unreachable code from commands.cat
1710 # duplicating unreachable code from commands.cat
1702 data = ctx[f].data()
1711 data = ctx[f].data()
1703 if opts.get(b'decode'):
1712 if opts.get(b'decode'):
1704 data = repo.wwritedata(f, data)
1713 data = repo.wwritedata(f, data)
1705 fp.write(data)
1714 fp.write(data)
1706 else:
1715 else:
1707 hash = lfutil.readasstandin(ctx[f])
1716 hash = lfutil.readasstandin(ctx[f])
1708 if not lfutil.inusercache(repo.ui, hash):
1717 if not lfutil.inusercache(repo.ui, hash):
1709 store = storefactory.openstore(repo)
1718 store = storefactory.openstore(repo)
1710 success, missing = store.get([(lf, hash)])
1719 success, missing = store.get([(lf, hash)])
1711 if len(success) != 1:
1720 if len(success) != 1:
1712 raise error.Abort(
1721 raise error.Abort(
1713 _(
1722 _(
1714 b'largefile %s is not in cache and could not be '
1723 b'largefile %s is not in cache and could not be '
1715 b'downloaded'
1724 b'downloaded'
1716 )
1725 )
1717 % lf
1726 % lf
1718 )
1727 )
1719 path = lfutil.usercachepath(repo.ui, hash)
1728 path = lfutil.usercachepath(repo.ui, hash)
1720 with open(path, b"rb") as fpin:
1729 with open(path, b"rb") as fpin:
1721 for chunk in util.filechunkiter(fpin):
1730 for chunk in util.filechunkiter(fpin):
1722 fp.write(chunk)
1731 fp.write(chunk)
1723 err = 0
1732 err = 0
1724 return err
1733 return err
1725
1734
1726
1735
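# util.filechunkiter (used above) streams a file in bounded chunks so a
# multi-gigabyte largefile is never read into memory at once. A sketch
# of its assumed shape (the default chunk size here is illustrative):

def filechunkiter_sketch(fp, size=131072):
    chunk = fp.read(size)
    while chunk:
        yield chunk
        chunk = fp.read(size)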
1727 @eh.wrapfunction(merge, b'_update')
1736 @eh.wrapfunction(merge, b'_update')
1728 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
1737 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
1729 matcher = kwargs.get('matcher', None)
1738 matcher = kwargs.get('matcher', None)
1730 # note if this is a partial update
1739 # note if this is a partial update
1731 partial = matcher and not matcher.always()
1740 partial = matcher and not matcher.always()
1732 with repo.wlock():
1741 with repo.wlock():
1733 # branch |       |         |
1742 # branch |       |         |
1734 #  merge | force | partial | action
1743 #  merge | force | partial | action
1735 # -------+-------+---------+--------------
1744 # -------+-------+---------+--------------
1736 #    x   |   x   |    x    | linear-merge
1745 #    x   |   x   |    x    | linear-merge
1737 #    o   |   x   |    x    | branch-merge
1746 #    o   |   x   |    x    | branch-merge
1738 #    x   |   o   |    x    | overwrite (as clean update)
1747 #    x   |   o   |    x    | overwrite (as clean update)
1739 #    o   |   o   |    x    | force-branch-merge (*1)
1748 #    o   |   o   |    x    | force-branch-merge (*1)
1740 #    x   |   x   |    o    |   (*)
1749 #    x   |   x   |    o    |   (*)
1741 #    o   |   x   |    o    |   (*)
1750 #    o   |   x   |    o    |   (*)
1742 #    x   |   o   |    o    | overwrite (as revert)
1751 #    x   |   o   |    o    | overwrite (as revert)
1743 #    o   |   o   |    o    |   (*)
1752 #    o   |   o   |    o    |   (*)
1744 #
1753 #
1745 # (*) don't care
1754 # (*) don't care
1746 # (*1) deprecated, but used internally (e.g.: "rebase --collapse")
1755 # (*1) deprecated, but used internally (e.g.: "rebase --collapse")
1747
1756
1748 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1757 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1749 unsure, s = lfdirstate.status(
1758 unsure, s, mtime_boundary = lfdirstate.status(
1750 matchmod.always(),
1759 matchmod.always(),
1751 subrepos=[],
1760 subrepos=[],
1752 ignored=False,
1761 ignored=False,
1753 clean=True,
1762 clean=True,
1754 unknown=False,
1763 unknown=False,
1755 )
1764 )
1756 oldclean = set(s.clean)
1765 oldclean = set(s.clean)
1757 pctx = repo[b'.']
1766 pctx = repo[b'.']
1758 dctx = repo[node]
1767 dctx = repo[node]
1759 for lfile in unsure + s.modified:
1768 for lfile in unsure + s.modified:
1760 lfileabs = repo.wvfs.join(lfile)
1769 lfileabs = repo.wvfs.join(lfile)
1761 if not repo.wvfs.exists(lfileabs):
1770 if not repo.wvfs.exists(lfileabs):
1762 continue
1771 continue
1763 lfhash = lfutil.hashfile(lfileabs)
1772 lfhash = lfutil.hashfile(lfileabs)
1764 standin = lfutil.standin(lfile)
1773 standin = lfutil.standin(lfile)
1765 lfutil.writestandin(
1774 lfutil.writestandin(
1766 repo, standin, lfhash, lfutil.getexecutable(lfileabs)
1775 repo, standin, lfhash, lfutil.getexecutable(lfileabs)
1767 )
1776 )
1768 if standin in pctx and lfhash == lfutil.readasstandin(
1777 if standin in pctx and lfhash == lfutil.readasstandin(
1769 pctx[standin]
1778 pctx[standin]
1770 ):
1779 ):
1771 oldclean.add(lfile)
1780 oldclean.add(lfile)
1772 for lfile in s.added:
1781 for lfile in s.added:
1773 fstandin = lfutil.standin(lfile)
1782 fstandin = lfutil.standin(lfile)
1774 if fstandin not in dctx:
1783 if fstandin not in dctx:
1775 # in this case, content of standin file is meaningless
1784 # in this case, content of standin file is meaningless
1776 # (in dctx, lfile is unknown, or normal file)
1785 # (in dctx, lfile is unknown, or normal file)
1777 continue
1786 continue
1778 lfutil.updatestandin(repo, lfile, fstandin)
1787 lfutil.updatestandin(repo, lfile, fstandin)
1779 # mark all clean largefiles as dirty, just in case the update gets
1788 # mark all clean largefiles as dirty, just in case the update gets
1780 # interrupted before largefiles and lfdirstate are synchronized
1789 # interrupted before largefiles and lfdirstate are synchronized
1781 for lfile in oldclean:
1790 for lfile in oldclean:
1782 lfdirstate.set_possibly_dirty(lfile)
1791 lfdirstate.set_possibly_dirty(lfile)
1783 lfdirstate.write(repo.currenttransaction())
1792 lfdirstate.write(repo.currenttransaction())
1784
1793
1785 oldstandins = lfutil.getstandinsstate(repo)
1794 oldstandins = lfutil.getstandinsstate(repo)
1786 wc = kwargs.get('wc')
1795 wc = kwargs.get('wc')
1787 if wc and wc.isinmemory():
1796 if wc and wc.isinmemory():
1788 # largefiles is not a good candidate for in-memory merge (large
1797 # largefiles is not a good candidate for in-memory merge (large
1789 # files, custom dirstate, matcher usage).
1798 # files, custom dirstate, matcher usage).
1790 raise error.ProgrammingError(
1799 raise error.ProgrammingError(
1791 b'largefiles is not compatible with in-memory merge'
1800 b'largefiles is not compatible with in-memory merge'
1792 )
1801 )
1793 with lfdirstate.parentchange():
1802 with lfdirstate.parentchange():
1794 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1803 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1795
1804
1796 newstandins = lfutil.getstandinsstate(repo)
1805 newstandins = lfutil.getstandinsstate(repo)
1797 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1806 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1798
1807
1799 # to avoid leaving all largefiles as dirty and thus rehash them, mark
1808 # to avoid leaving all largefiles as dirty and thus rehash them, mark
1800 # all the ones that didn't change as clean
1809 # all the ones that didn't change as clean
1801 for lfile in oldclean.difference(filelist):
1810 for lfile in oldclean.difference(filelist):
1802 lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)
1811 lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)
1803 lfdirstate.write(repo.currenttransaction())
1812 lfdirstate.write(repo.currenttransaction())
1804
1813
1805 if branchmerge or force or partial:
1814 if branchmerge or force or partial:
1806 filelist.extend(s.deleted + s.removed)
1815 filelist.extend(s.deleted + s.removed)
1807
1816
1808 lfcommands.updatelfiles(
1817 lfcommands.updatelfiles(
1809 repo.ui, repo, filelist=filelist, normallookup=partial
1818 repo.ui, repo, filelist=filelist, normallookup=partial
1810 )
1819 )
1811
1820
1812 return result
1821 return result
1813
1822
1814
1823
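# lfutil.hashfile (used above to refresh standins) identifies a
# largefile by the sha1 of its content. A self-contained sketch of the
# assumed behavior, hashing in chunks:

import hashlib

def hashfile_sketch(path):
    h = hashlib.sha1()
    with open(path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(128 * 1024), b''):
            h.update(chunk)
    return h.hexdigest().encode('ascii')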
1815 @eh.wrapfunction(scmutil, b'marktouched')
1824 @eh.wrapfunction(scmutil, b'marktouched')
1816 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1825 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1817 result = orig(repo, files, *args, **kwargs)
1826 result = orig(repo, files, *args, **kwargs)
1818
1827
1819 filelist = []
1828 filelist = []
1820 for f in files:
1829 for f in files:
1821 lf = lfutil.splitstandin(f)
1830 lf = lfutil.splitstandin(f)
1822 if lf is not None:
1831 if lf is not None:
1823 filelist.append(lf)
1832 filelist.append(lf)
1824 if filelist:
1833 if filelist:
1825 lfcommands.updatelfiles(
1834 lfcommands.updatelfiles(
1826 repo.ui,
1835 repo.ui,
1827 repo,
1836 repo,
1828 filelist=filelist,
1837 filelist=filelist,
1829 printmessage=False,
1838 printmessage=False,
1830 normallookup=True,
1839 normallookup=True,
1831 )
1840 )
1832
1841
1833 return result
1842 return result
1834
1843
1835
1844
1836 @eh.wrapfunction(upgrade_actions, b'preservedrequirements')
1845 @eh.wrapfunction(upgrade_actions, b'preservedrequirements')
1837 @eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
1846 @eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
1838 def upgraderequirements(orig, repo):
1847 def upgraderequirements(orig, repo):
1839 reqs = orig(repo)
1848 reqs = orig(repo)
1840 if b'largefiles' in repo.requirements:
1849 if b'largefiles' in repo.requirements:
1841 reqs.add(b'largefiles')
1850 reqs.add(b'largefiles')
1842 return reqs
1851 return reqs
1843
1852
1844
1853
1845 _lfscheme = b'largefile://'
1854 _lfscheme = b'largefile://'
1846
1855
1847
1856
1848 @eh.wrapfunction(urlmod, b'open')
1857 @eh.wrapfunction(urlmod, b'open')
1849 def openlargefile(orig, ui, url_, data=None, **kwargs):
1858 def openlargefile(orig, ui, url_, data=None, **kwargs):
1850 if url_.startswith(_lfscheme):
1859 if url_.startswith(_lfscheme):
1851 if data:
1860 if data:
1852 msg = b"cannot use data on a 'largefile://' url"
1861 msg = b"cannot use data on a 'largefile://' url"
1853 raise error.ProgrammingError(msg)
1862 raise error.ProgrammingError(msg)
1854 lfid = url_[len(_lfscheme) :]
1863 lfid = url_[len(_lfscheme) :]
1855 return storefactory.getlfile(ui, lfid)
1864 return storefactory.getlfile(ui, lfid)
1856 else:
1865 else:
1857 return orig(ui, url_, data=data, **kwargs)
1866 return orig(ui, url_, data=data, **kwargs)
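# Scheme-dispatch sketch: a URL of the form largefile://<sha1> bypasses
# the generic opener and resolves the hash through the largefile store.
# Hypothetical call site:
#
#     fp = urlmod.open(ui, b'largefile://' + lfhash)
#     data = fp.read()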
@@ -1,456 +1,468 @@
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''setup for largefiles repositories: reposetup'''
9 '''setup for largefiles repositories: reposetup'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import copy
12 import copy
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 from mercurial import (
16 from mercurial import (
17 error,
17 error,
18 extensions,
18 extensions,
19 localrepo,
19 localrepo,
20 match as matchmod,
20 match as matchmod,
21 scmutil,
21 scmutil,
22 util,
22 util,
23 )
23 )
24
24
25 from mercurial.dirstateutils import timestamp
26
25 from . import (
27 from . import (
26 lfcommands,
28 lfcommands,
27 lfutil,
29 lfutil,
28 )
30 )
29
31
30
32
31 def reposetup(ui, repo):
33 def reposetup(ui, repo):
32 # wire repositories should be given new wireproto functions
34 # wire repositories should be given new wireproto functions
33 # by "proto.wirereposetup()" via "hg.wirepeersetupfuncs"
35 # by "proto.wirereposetup()" via "hg.wirepeersetupfuncs"
34 if not repo.local():
36 if not repo.local():
35 return
37 return
36
38
37 class lfilesrepo(repo.__class__):
39 class lfilesrepo(repo.__class__):
38 # the mark to examine whether "repo" object enables largefiles or not
40 # the mark to examine whether "repo" object enables largefiles or not
39 _largefilesenabled = True
41 _largefilesenabled = True
40
42
41 lfstatus = False
43 lfstatus = False
42
44
43 # When lfstatus is set, return a context that gives the names
45 # When lfstatus is set, return a context that gives the names
44 # of largefiles instead of their corresponding standins and
46 # of largefiles instead of their corresponding standins and
45 # identifies the largefiles as always binary, regardless of
47 # identifies the largefiles as always binary, regardless of
46 # their actual contents.
48 # their actual contents.
47 def __getitem__(self, changeid):
49 def __getitem__(self, changeid):
48 ctx = super(lfilesrepo, self).__getitem__(changeid)
50 ctx = super(lfilesrepo, self).__getitem__(changeid)
49 if self.lfstatus:
51 if self.lfstatus:
50
52
51 def files(orig):
53 def files(orig):
52 filenames = orig()
54 filenames = orig()
53 return [lfutil.splitstandin(f) or f for f in filenames]
55 return [lfutil.splitstandin(f) or f for f in filenames]
54
56
55 extensions.wrapfunction(ctx, 'files', files)
57 extensions.wrapfunction(ctx, 'files', files)
56
58
57 def manifest(orig):
59 def manifest(orig):
58 man1 = orig()
60 man1 = orig()
59
61
60 class lfilesmanifest(man1.__class__):
62 class lfilesmanifest(man1.__class__):
61 def __contains__(self, filename):
63 def __contains__(self, filename):
62 orig = super(lfilesmanifest, self).__contains__
64 orig = super(lfilesmanifest, self).__contains__
63 return orig(filename) or orig(
65 return orig(filename) or orig(
64 lfutil.standin(filename)
66 lfutil.standin(filename)
65 )
67 )
66
68
67 man1.__class__ = lfilesmanifest
69 man1.__class__ = lfilesmanifest
68 return man1
70 return man1
69
71
70 extensions.wrapfunction(ctx, 'manifest', manifest)
72 extensions.wrapfunction(ctx, 'manifest', manifest)
71
73
72 def filectx(orig, path, fileid=None, filelog=None):
74 def filectx(orig, path, fileid=None, filelog=None):
73 try:
75 try:
74 if filelog is not None:
76 if filelog is not None:
75 result = orig(path, fileid, filelog)
77 result = orig(path, fileid, filelog)
76 else:
78 else:
77 result = orig(path, fileid)
79 result = orig(path, fileid)
78 except error.LookupError:
80 except error.LookupError:
79 # Adding a null character will cause Mercurial to
81 # Adding a null character will cause Mercurial to
80 # identify this as a binary file.
82 # identify this as a binary file.
81 if filelog is not None:
83 if filelog is not None:
82 result = orig(lfutil.standin(path), fileid, filelog)
84 result = orig(lfutil.standin(path), fileid, filelog)
83 else:
85 else:
84 result = orig(lfutil.standin(path), fileid)
86 result = orig(lfutil.standin(path), fileid)
85 olddata = result.data
87 olddata = result.data
86 result.data = lambda: olddata() + b'\0'
88 result.data = lambda: olddata() + b'\0'
87 return result
89 return result
88
90
89 extensions.wrapfunction(ctx, 'filectx', filectx)
91 extensions.wrapfunction(ctx, 'filectx', filectx)
90
92
91 return ctx
93 return ctx
92
94
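# Why the trailing b'\0' in filectx() works: Mercurial's binary-vs-text
# check is NUL-based, roughly equivalent to this sketch (compare
# mercurial.utils.stringutil.binary), so largefile contents are always
# reported as binary regardless of what they contain.

def binary_sketch(s):
    return b'\0' in s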
93 # Figure out the status of big files and insert them into the
95 # Figure out the status of big files and insert them into the
94 # appropriate list in the result. Also remove standin files
96 # appropriate list in the result. Also remove standin files
95 # from the listing. Revert to the original status if
97 # from the listing. Revert to the original status if
96 # self.lfstatus is False.
98 # self.lfstatus is False.
97 # XXX large file status is buggy when used on repo proxy.
99 # XXX large file status is buggy when used on repo proxy.
98 # XXX this needs to be investigated.
100 # XXX this needs to be investigated.
99 @localrepo.unfilteredmethod
101 @localrepo.unfilteredmethod
100 def status(
102 def status(
101 self,
103 self,
102 node1=b'.',
104 node1=b'.',
103 node2=None,
105 node2=None,
104 match=None,
106 match=None,
105 ignored=False,
107 ignored=False,
106 clean=False,
108 clean=False,
107 unknown=False,
109 unknown=False,
108 listsubrepos=False,
110 listsubrepos=False,
109 ):
111 ):
110 listignored, listclean, listunknown = ignored, clean, unknown
112 listignored, listclean, listunknown = ignored, clean, unknown
111 orig = super(lfilesrepo, self).status
113 orig = super(lfilesrepo, self).status
112 if not self.lfstatus:
114 if not self.lfstatus:
113 return orig(
115 return orig(
114 node1,
116 node1,
115 node2,
117 node2,
116 match,
118 match,
117 listignored,
119 listignored,
118 listclean,
120 listclean,
119 listunknown,
121 listunknown,
120 listsubrepos,
122 listsubrepos,
121 )
123 )
122
124
123 # some calls in this function rely on the old version of status
125 # some calls in this function rely on the old version of status
124 self.lfstatus = False
126 self.lfstatus = False
125 ctx1 = self[node1]
127 ctx1 = self[node1]
126 ctx2 = self[node2]
128 ctx2 = self[node2]
127 working = ctx2.rev() is None
129 working = ctx2.rev() is None
128 parentworking = working and ctx1 == self[b'.']
130 parentworking = working and ctx1 == self[b'.']
129
131
130 if match is None:
132 if match is None:
131 match = matchmod.always()
133 match = matchmod.always()
132
134
133 try:
135 try:
134 # updating the dirstate is optional
136 # updating the dirstate is optional
135 # so we don't wait on the lock
137 # so we don't wait on the lock
136 wlock = self.wlock(False)
138 wlock = self.wlock(False)
137 gotlock = True
139 gotlock = True
138 except error.LockError:
140 except error.LockError:
139 wlock = util.nullcontextmanager()
141 wlock = util.nullcontextmanager()
140 gotlock = False
142 gotlock = False
141 with wlock:
143 with wlock:
142
144
143 # First check if paths or patterns were specified on the
145 # First check if paths or patterns were specified on the
144 # command line. If there were, and they don't match any
146 # command line. If there were, and they don't match any
145 # largefiles, we should just bail here and let super
147 # largefiles, we should just bail here and let super
146 # handle it -- thus gaining a big performance boost.
148 # handle it -- thus gaining a big performance boost.
147 lfdirstate = lfutil.openlfdirstate(ui, self)
149 lfdirstate = lfutil.openlfdirstate(ui, self)
148 if not match.always():
150 if not match.always():
149 for f in lfdirstate:
151 for f in lfdirstate:
150 if match(f):
152 if match(f):
151 break
153 break
152 else:
154 else:
153 return orig(
155 return orig(
154 node1,
156 node1,
155 node2,
157 node2,
156 match,
158 match,
157 listignored,
159 listignored,
158 listclean,
160 listclean,
159 listunknown,
161 listunknown,
160 listsubrepos,
162 listsubrepos,
161 )
163 )
162
164
163 # Create a copy of match that matches standins instead
165 # Create a copy of match that matches standins instead
164 # of largefiles.
166 # of largefiles.
165 def tostandins(files):
167 def tostandins(files):
166 if not working:
168 if not working:
167 return files
169 return files
168 newfiles = []
170 newfiles = []
169 dirstate = self.dirstate
171 dirstate = self.dirstate
170 for f in files:
172 for f in files:
171 sf = lfutil.standin(f)
173 sf = lfutil.standin(f)
172 if sf in dirstate:
174 if sf in dirstate:
173 newfiles.append(sf)
175 newfiles.append(sf)
174 elif dirstate.hasdir(sf):
176 elif dirstate.hasdir(sf):
175 # Directory entries could be regular or
177 # Directory entries could be regular or
176 # standin, check both
178 # standin, check both
177 newfiles.extend((f, sf))
179 newfiles.extend((f, sf))
178 else:
180 else:
179 newfiles.append(f)
181 newfiles.append(f)
180 return newfiles
182 return newfiles
181
183
182 m = copy.copy(match)
184 m = copy.copy(match)
183 m._files = tostandins(m._files)
185 m._files = tostandins(m._files)
184
186
185 result = orig(
187 result = orig(
186 node1, node2, m, ignored, clean, unknown, listsubrepos
188 node1, node2, m, ignored, clean, unknown, listsubrepos
187 )
189 )
188 if working:
190 if working:
189
191
190 def sfindirstate(f):
192 def sfindirstate(f):
191 sf = lfutil.standin(f)
193 sf = lfutil.standin(f)
192 dirstate = self.dirstate
194 dirstate = self.dirstate
193 return sf in dirstate or dirstate.hasdir(sf)
195 return sf in dirstate or dirstate.hasdir(sf)
194
196
195 match._files = [f for f in match._files if sfindirstate(f)]
197 match._files = [f for f in match._files if sfindirstate(f)]
196 # Don't waste time getting the ignored and unknown
198 # Don't waste time getting the ignored and unknown
197 # files from lfdirstate
199 # files from lfdirstate
198 unsure, s = lfdirstate.status(
200 unsure, s, mtime_boundary = lfdirstate.status(
199 match,
201 match,
200 subrepos=[],
202 subrepos=[],
201 ignored=False,
203 ignored=False,
202 clean=listclean,
204 clean=listclean,
203 unknown=False,
205 unknown=False,
204 )
206 )
205 (modified, added, removed, deleted, clean) = (
207 (modified, added, removed, deleted, clean) = (
206 s.modified,
208 s.modified,
207 s.added,
209 s.added,
208 s.removed,
210 s.removed,
209 s.deleted,
211 s.deleted,
210 s.clean,
212 s.clean,
211 )
213 )
212 if parentworking:
214 if parentworking:
215 wctx = repo[None]
213 for lfile in unsure:
216 for lfile in unsure:
214 standin = lfutil.standin(lfile)
217 standin = lfutil.standin(lfile)
215 if standin not in ctx1:
218 if standin not in ctx1:
216 # from second parent
219 # from second parent
217 modified.append(lfile)
220 modified.append(lfile)
218 elif lfutil.readasstandin(
221 elif lfutil.readasstandin(
219 ctx1[standin]
222 ctx1[standin]
220 ) != lfutil.hashfile(self.wjoin(lfile)):
223 ) != lfutil.hashfile(self.wjoin(lfile)):
221 modified.append(lfile)
224 modified.append(lfile)
222 else:
225 else:
223 if listclean:
226 if listclean:
224 clean.append(lfile)
227 clean.append(lfile)
225 lfdirstate.set_clean(lfile)
228 s = wctx[lfile].lstat()
229 mode = s.st_mode
230 size = s.st_size
231 mtime = timestamp.reliable_mtime_of(
232 s, mtime_boundary
233 )
234 if mtime is not None:
235 cache_data = (mode, size, mtime)
236 lfdirstate.set_clean(lfile, cache_data)
226 else:
237 else:
227 tocheck = unsure + modified + added + clean
238 tocheck = unsure + modified + added + clean
228 modified, added, clean = [], [], []
239 modified, added, clean = [], [], []
229 checkexec = self.dirstate._checkexec
240 checkexec = self.dirstate._checkexec
230
241
231 for lfile in tocheck:
242 for lfile in tocheck:
232 standin = lfutil.standin(lfile)
243 standin = lfutil.standin(lfile)
233 if standin in ctx1:
244 if standin in ctx1:
234 abslfile = self.wjoin(lfile)
245 abslfile = self.wjoin(lfile)
235 if (
246 if (
236 lfutil.readasstandin(ctx1[standin])
247 lfutil.readasstandin(ctx1[standin])
237 != lfutil.hashfile(abslfile)
248 != lfutil.hashfile(abslfile)
238 ) or (
249 ) or (
239 checkexec
250 checkexec
240 and (b'x' in ctx1.flags(standin))
251 and (b'x' in ctx1.flags(standin))
241 != bool(lfutil.getexecutable(abslfile))
252 != bool(lfutil.getexecutable(abslfile))
242 ):
253 ):
243 modified.append(lfile)
254 modified.append(lfile)
244 elif listclean:
255 elif listclean:
245 clean.append(lfile)
256 clean.append(lfile)
246 else:
257 else:
247 added.append(lfile)
258 added.append(lfile)
248
259
249 # at this point, 'removed' contains largefiles
260 # at this point, 'removed' contains largefiles
250 # marked as 'R' in the working context.
261 # marked as 'R' in the working context.
251 # then, largefiles not managed also in the target
262 # then, largefiles not managed also in the target
252 # context should be excluded from 'removed'.
263 # context should be excluded from 'removed'.
253 removed = [
264 removed = [
254 lfile
265 lfile
255 for lfile in removed
266 for lfile in removed
256 if lfutil.standin(lfile) in ctx1
267 if lfutil.standin(lfile) in ctx1
257 ]
268 ]
258
269
259 # Standins no longer found in lfdirstate have been deleted
270 # Standins no longer found in lfdirstate have been deleted
260 for standin in ctx1.walk(lfutil.getstandinmatcher(self)):
271 for standin in ctx1.walk(lfutil.getstandinmatcher(self)):
261 lfile = lfutil.splitstandin(standin)
272 lfile = lfutil.splitstandin(standin)
262 if not match(lfile):
273 if not match(lfile):
263 continue
274 continue
264 if lfile not in lfdirstate:
275 if lfile not in lfdirstate:
265 deleted.append(lfile)
276 deleted.append(lfile)
266 # Sync "largefile has been removed" back to the
277 # Sync "largefile has been removed" back to the
267 # standin. Removing a file as a side effect of
278 # standin. Removing a file as a side effect of
268 # running status is gross, but the alternatives (if
279 # running status is gross, but the alternatives (if
269 # any) are worse.
280 # any) are worse.
270 self.wvfs.unlinkpath(standin, ignoremissing=True)
281 self.wvfs.unlinkpath(standin, ignoremissing=True)
271
282
272 # Filter result lists
283 # Filter result lists
273 result = list(result)
284 result = list(result)
274
285
275 # Largefiles are not really removed when they're
286 # Largefiles are not really removed when they're
276 # still in the normal dirstate. Likewise, normal
287 # still in the normal dirstate. Likewise, normal
277 # files are not really removed if they are still in
288 # files are not really removed if they are still in
278 # lfdirstate. This happens in merges where files
289 # lfdirstate. This happens in merges where files
279 # change type.
290 # change type.
280 removed = [f for f in removed if f not in self.dirstate]
291 removed = [f for f in removed if f not in self.dirstate]
281 result[2] = [f for f in result[2] if f not in lfdirstate]
292 result[2] = [f for f in result[2] if f not in lfdirstate]
282
293
283 lfiles = set(lfdirstate)
294 lfiles = set(lfdirstate)
284 # Unknown files
295 # Unknown files
285 result[4] = set(result[4]).difference(lfiles)
296 result[4] = set(result[4]).difference(lfiles)
286 # Ignored files
297 # Ignored files
287 result[5] = set(result[5]).difference(lfiles)
298 result[5] = set(result[5]).difference(lfiles)
288 # combine normal files and largefiles
299 # combine normal files and largefiles
289 normals = [
300 normals = [
290 [fn for fn in filelist if not lfutil.isstandin(fn)]
301 [fn for fn in filelist if not lfutil.isstandin(fn)]
291 for filelist in result
302 for filelist in result
292 ]
303 ]
293 lfstatus = (
304 lfstatus = (
294 modified,
305 modified,
295 added,
306 added,
296 removed,
307 removed,
297 deleted,
308 deleted,
298 [],
309 [],
299 [],
310 [],
300 clean,
311 clean,
301 )
312 )
302 result = [
313 result = [
303 sorted(list1 + list2)
314 sorted(list1 + list2)
304 for (list1, list2) in zip(normals, lfstatus)
315 for (list1, list2) in zip(normals, lfstatus)
305 ]
316 ]
306 else: # not against working directory
317 else: # not against working directory
307 result = [
318 result = [
308 [lfutil.splitstandin(f) or f for f in items]
319 [lfutil.splitstandin(f) or f for f in items]
309 for items in result
320 for items in result
310 ]
321 ]
311
322
312 if gotlock:
323 if gotlock:
313 lfdirstate.write(self.currenttransaction())
324 lfdirstate.write(self.currenttransaction())
314
325
315 self.lfstatus = True
326 self.lfstatus = True
316 return scmutil.status(*result)
327 return scmutil.status(*result)
317
328
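# The set_clean() change above only caches a (mode, size, mtime) triple
# when timestamp.reliable_mtime_of() says the mtime is trustworthy.
# Assumed semantics, sketched: an mtime at or past the status run's
# boundary could still be racing with a concurrent write, so it must
# not be cached (None); older mtimes are safe.

def reliable_mtime_of_sketch(mtime, boundary):
    # mtime and boundary are comparable timestamps; the real helper
    # works on os.stat_result values with sub-second precision
    return None if mtime >= boundary else mtime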
318 def commitctx(self, ctx, *args, **kwargs):
329 def commitctx(self, ctx, *args, **kwargs):
319 node = super(lfilesrepo, self).commitctx(ctx, *args, **kwargs)
330 node = super(lfilesrepo, self).commitctx(ctx, *args, **kwargs)
320
331
321 class lfilesctx(ctx.__class__):
332 class lfilesctx(ctx.__class__):
322 def markcommitted(self, node):
333 def markcommitted(self, node):
323 orig = super(lfilesctx, self).markcommitted
334 orig = super(lfilesctx, self).markcommitted
324 return lfutil.markcommitted(orig, self, node)
335 return lfutil.markcommitted(orig, self, node)
325
336
326 ctx.__class__ = lfilesctx
337 ctx.__class__ = lfilesctx
327 return node
338 return node
328
339
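# The markcommitted hook above is installed by retargeting the context
# instance's __class__ to a subclass created on the fly, so only that
# one object gains the new behavior. A self-contained sketch of the
# technique:

class Base:
    pass

obj = Base()

class Patched(obj.__class__):
    def hello(self):
        return 'patched'

obj.__class__ = Patched
assert obj.hello() == 'patched'
assert not hasattr(Base(), 'hello')  # other instances are untouched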
329 # Before commit, largefile standins have not had their
340 # Before commit, largefile standins have not had their
330 # contents updated to reflect the hash of their largefile.
341 # contents updated to reflect the hash of their largefile.
331 # Do that here.
342 # Do that here.
332 def commit(
343 def commit(
333 self,
344 self,
334 text=b"",
345 text=b"",
335 user=None,
346 user=None,
336 date=None,
347 date=None,
337 match=None,
348 match=None,
338 force=False,
349 force=False,
339 editor=False,
350 editor=False,
340 extra=None,
351 extra=None,
341 ):
352 ):
342 if extra is None:
353 if extra is None:
343 extra = {}
354 extra = {}
344 orig = super(lfilesrepo, self).commit
355 orig = super(lfilesrepo, self).commit
345
356
346 with self.wlock():
357 with self.wlock():
347 lfcommithook = self._lfcommithooks[-1]
358 lfcommithook = self._lfcommithooks[-1]
348 match = lfcommithook(self, match)
359 match = lfcommithook(self, match)
349 result = orig(
360 result = orig(
350 text=text,
361 text=text,
351 user=user,
362 user=user,
352 date=date,
363 date=date,
353 match=match,
364 match=match,
354 force=force,
365 force=force,
355 editor=editor,
366 editor=editor,
356 extra=extra,
367 extra=extra,
357 )
368 )
358 return result
369 return result
359
370
360 # TODO: _subdirlfs should be moved into "lfutil.py", because
371 # TODO: _subdirlfs should be moved into "lfutil.py", because
361 # it is referred only from "lfutil.updatestandinsbymatch"
372 # it is referred only from "lfutil.updatestandinsbymatch"
362 def _subdirlfs(self, files, lfiles):
373 def _subdirlfs(self, files, lfiles):
363 """
374 """
364 Adjust matched file list
375 Adjust matched file list
365 If we pass a directory to commit whose only committable files
376 If we pass a directory to commit whose only committable files
366 are largefiles, the core commit code aborts before finding
377 are largefiles, the core commit code aborts before finding
367 the largefiles.
378 the largefiles.
368 So we do the following:
379 So we do the following:
369 For directories that only have largefiles as matches,
380 For directories that only have largefiles as matches,
370 we explicitly add the largefiles to the match list and remove
381 we explicitly add the largefiles to the match list and remove
371 the directory.
382 the directory.
372 In other cases, we leave the match list unmodified.
383 In other cases, we leave the match list unmodified.
373 """
384 """
374 actualfiles = []
385 actualfiles = []
375 dirs = []
386 dirs = []
376 regulars = []
387 regulars = []
377
388
378 for f in files:
389 for f in files:
379 if lfutil.isstandin(f + b'/'):
390 if lfutil.isstandin(f + b'/'):
380 raise error.Abort(
391 raise error.Abort(
381 _(b'file "%s" is a largefile standin') % f,
392 _(b'file "%s" is a largefile standin') % f,
382 hint=b'commit the largefile itself instead',
393 hint=b'commit the largefile itself instead',
383 )
394 )
384 # Scan directories
395 # Scan directories
385 if self.wvfs.isdir(f):
396 if self.wvfs.isdir(f):
386 dirs.append(f)
397 dirs.append(f)
387 else:
398 else:
388 regulars.append(f)
399 regulars.append(f)
389
400
390 for f in dirs:
401 for f in dirs:
391 matcheddir = False
402 matcheddir = False
392 d = self.dirstate.normalize(f) + b'/'
403 d = self.dirstate.normalize(f) + b'/'
393 # Check for matched normal files
404 # Check for matched normal files
394 for mf in regulars:
405 for mf in regulars:
395 if self.dirstate.normalize(mf).startswith(d):
406 if self.dirstate.normalize(mf).startswith(d):
396 actualfiles.append(f)
407 actualfiles.append(f)
397 matcheddir = True
408 matcheddir = True
398 break
409 break
399 if not matcheddir:
410 if not matcheddir:
400 # If no normal match, manually append
411 # If no normal match, manually append
401 # any matching largefiles
412 # any matching largefiles
402 for lf in lfiles:
413 for lf in lfiles:
403 if self.dirstate.normalize(lf).startswith(d):
414 if self.dirstate.normalize(lf).startswith(d):
404 actualfiles.append(lf)
415 actualfiles.append(lf)
405 if not matcheddir:
416 if not matcheddir:
406 # There may still be normal files in the dir, so
417 # There may still be normal files in the dir, so
407 # add a directory to the list, which
418 # add a directory to the list, which
408 # forces status/dirstate to walk all files and
419 # forces status/dirstate to walk all files and
409 # call the match function on the matcher, even
420 # call the match function on the matcher, even
410 # on case sensitive filesystems.
421 # on case sensitive filesystems.
411 actualfiles.append(b'.')
422 actualfiles.append(b'.')
412 matcheddir = True
423 matcheddir = True
413 # Nothing in dir, so readd it
424 # Nothing in dir, so readd it
414 # and let commit reject it
425 # and let commit reject it
415 if not matcheddir:
426 if not matcheddir:
416 actualfiles.append(f)
427 actualfiles.append(f)
417
428
418 # Always add normal files
429 # Always add normal files
419 actualfiles += regulars
430 actualfiles += regulars
420 return actualfiles
431 return actualfiles
421
432
422 repo.__class__ = lfilesrepo
433 repo.__class__ = lfilesrepo
423
434
424 # stack of hooks being executed before committing.
435 # stack of hooks being executed before committing.
425 # only last element ("_lfcommithooks[-1]") is used for each committing.
436 # only last element ("_lfcommithooks[-1]") is used for each committing.
426 repo._lfcommithooks = [lfutil.updatestandinsbymatch]
437 repo._lfcommithooks = [lfutil.updatestandinsbymatch]
427
438
428 # Stack of status writer functions taking "*msg, **opts" arguments
439 # Stack of status writer functions taking "*msg, **opts" arguments
429 # like "ui.status()". Only last element ("_lfstatuswriters[-1]")
440 # like "ui.status()". Only last element ("_lfstatuswriters[-1]")
430 # is used to write status out.
441 # is used to write status out.
431 repo._lfstatuswriters = [ui.status]
442 repo._lfstatuswriters = [ui.status]
432
443
433 def prepushoutgoinghook(pushop):
444 def prepushoutgoinghook(pushop):
434 """Push largefiles for pushop before pushing revisions."""
445 """Push largefiles for pushop before pushing revisions."""
435 lfrevs = pushop.lfrevs
446 lfrevs = pushop.lfrevs
436 if lfrevs is None:
447 if lfrevs is None:
437 lfrevs = pushop.outgoing.missing
448 lfrevs = pushop.outgoing.missing
438 if lfrevs:
449 if lfrevs:
439 toupload = set()
450 toupload = set()
440 addfunc = lambda fn, lfhash: toupload.add(lfhash)
451 addfunc = lambda fn, lfhash: toupload.add(lfhash)
441 lfutil.getlfilestoupload(pushop.repo, lfrevs, addfunc)
452 lfutil.getlfilestoupload(pushop.repo, lfrevs, addfunc)
442 lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload)
453 lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload)
443
454
444 repo.prepushoutgoinghooks.add(b"largefiles", prepushoutgoinghook)
455 repo.prepushoutgoinghooks.add(b"largefiles", prepushoutgoinghook)
445
456
446 def checkrequireslfiles(ui, repo, **kwargs):
457 def checkrequireslfiles(ui, repo, **kwargs):
447 if b'largefiles' not in repo.requirements and any(
458 with repo.lock():
448 lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles()
459 if b'largefiles' not in repo.requirements and any(
449 ):
460 lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles()
450 repo.requirements.add(b'largefiles')
461 ):
451 scmutil.writereporequirements(repo)
462 repo.requirements.add(b'largefiles')
463 scmutil.writereporequirements(repo)
452
464
453 ui.setconfig(
465 ui.setconfig(
454 b'hooks', b'changegroup.lfiles', checkrequireslfiles, b'largefiles'
466 b'hooks', b'changegroup.lfiles', checkrequireslfiles, b'largefiles'
455 )
467 )
456 ui.setconfig(b'hooks', b'commit.lfiles', checkrequireslfiles, b'largefiles')
468 ui.setconfig(b'hooks', b'commit.lfiles', checkrequireslfiles, b'largefiles')
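The one functional change in the hunk above is that ``checkrequireslfiles`` now
takes the repository lock before inspecting and rewriting ``repo.requirements``;
the lfs hook in the next file receives the same treatment. A minimal sketch of
the pattern, with ``upgrade_requirement`` and ``b'myfeature'`` invented for
illustration::

    def upgrade_requirement(ui, repo, **kwargs):
        # Check-then-write on the requirements file races against
        # concurrent writers unless the store lock is held.
        with repo.lock():
            if b'myfeature' in repo.requirements:
                return  # already present, nothing to do
            repo.requirements.add(b'myfeature')
            scmutil.writereporequirements(repo)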
@@ -1,444 +1,447 @@
# lfs - hash-preserving large file support using Git-LFS protocol
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""lfs - large file support (EXPERIMENTAL)

This extension allows large files to be tracked outside of the normal
repository storage and stored on a centralized server, similar to the
``largefiles`` extension. The ``git-lfs`` protocol is used when
communicating with the server, so existing git infrastructure can be
harnessed. Even though the files are stored outside of the repository,
they are still integrity checked in the same manner as normal files.

The files stored outside of the repository are downloaded on demand,
which reduces the time to clone, and possibly the local disk usage.
This changes fundamental workflows in a DVCS, so careful thought
should be given before deploying it. :hg:`convert` can be used to
convert LFS repositories to normal repositories that no longer
require this extension, and do so without changing the commit hashes.
This allows the extension to be disabled if the centralized workflow
becomes burdensome. However, the pre and post convert clones will
not be able to communicate with each other unless the extension is
enabled on both.

To start a new repository, or to add LFS files to an existing one, just
create an ``.hglfs`` file as described below in the root directory of
the repository. Typically, this file should be put under version
control, so that the settings will propagate to other repositories with
push and pull. During any commit, Mercurial will consult this file to
determine if an added or modified file should be stored externally. The
type of storage depends on the characteristics of the file at each
commit. A file that is near a size threshold may switch back and forth
between LFS and normal storage, as needed.

Alternately, both normal repositories and largefile controlled
repositories can be converted to LFS by using :hg:`convert` and the
``lfs.track`` config option described below. The ``.hglfs`` file
should then be created and added, to control subsequent LFS selection.
The hashes are also unchanged in this case. The LFS and non-LFS
repositories can be distinguished because the LFS repository will
abort any command if this extension is disabled.

Committed LFS files are held locally, until the repository is pushed.
Prior to pushing the normal repository data, the LFS files that are
tracked by the outgoing commits are automatically uploaded to the
configured central server. No LFS files are transferred on
:hg:`pull` or :hg:`clone`. Instead, the files are downloaded on
demand as they need to be read, if a cached copy cannot be found
locally. Both committing and downloading an LFS file will link the
file to a usercache, to speed up future access. See the `usercache`
config setting described below.

The extension reads its configuration from a versioned ``.hglfs``
configuration file found in the root of the working directory. The
``.hglfs`` file uses the same syntax as all other Mercurial
configuration files. It uses a single section, ``[track]``.

The ``[track]`` section specifies which files are stored as LFS (or
not). Each line is keyed by a file pattern, with a predicate value.
The first file pattern match is used, so put more specific patterns
first. The available predicates are ``all()``, ``none()``, and
``size()``. See "hg help filesets.size" for the latter.

Example versioned ``.hglfs`` file::

    [track]
    # No Makefile or python file, anywhere, will be LFS
    **Makefile = none()
    **.py = none()

    **.zip = all()
    **.exe = size(">1MB")

    # Catchall for everything not matched above
    ** = size(">10MB")

Configs::

    [lfs]
    # Remote endpoint. Multiple protocols are supported:
    # - http(s)://user:pass@example.com/path
    #   git-lfs endpoint
    # - file:///tmp/path
    #   local filesystem, usually for testing
    # if unset, lfs will assume the remote repository also handles blob storage
    # for http(s) URLs. Otherwise, lfs will prompt to set this when it must
    # use this value.
    # (default: unset)
    url = https://example.com/repo.git/info/lfs

    # Which files to track in LFS. Path tests are "**.extname" for file
    # extensions, and "path:under/some/directory" for path prefix. Both
    # are relative to the repository root.
    # File size can be tested with the "size()" fileset, and tests can be
    # joined with fileset operators. (See "hg help filesets.operators".)
    #
    # Some examples:
    # - all()                       # everything
    # - none()                      # nothing
    # - size(">20MB")               # larger than 20MB
    # - !**.txt                     # anything not a *.txt file
    # - **.zip | **.tar.gz | **.7z  # some types of compressed files
    # - path:bin                    # files under "bin" in the project root
    # - (**.php & size(">2MB")) | (**.js & size(">5MB")) | **.tar.gz
    #     | (path:bin & !path:/bin/README) | size(">1GB")
    # (default: none())
    #
    # This is ignored if there is a tracked '.hglfs' file, and this setting
    # will eventually be deprecated and removed.
    track = size(">10M")

    # how many times to retry before giving up on transferring an object
    retry = 5

    # the local directory to store lfs files for sharing across local clones.
    # If not set, the cache is located in an OS specific cache location.
    usercache = /path/to/global/cache
"""

from __future__ import absolute_import

import sys

from mercurial.i18n import _
from mercurial.node import bin

from mercurial import (
    bundlecaches,
    config,
    context,
    error,
    extensions,
    exthelper,
    filelog,
    filesetlang,
    localrepo,
    logcmdutil,
    minifileset,
    pycompat,
    revlog,
    scmutil,
    templateutil,
    util,
)

from mercurial.interfaces import repository

from . import (
    blobstore,
    wireprotolfsserver,
    wrapper,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()
eh.merge(wrapper.eh)
eh.merge(wireprotolfsserver.eh)

cmdtable = eh.cmdtable
configtable = eh.configtable
extsetup = eh.finalextsetup
uisetup = eh.finaluisetup
filesetpredicate = eh.filesetpredicate
reposetup = eh.finalreposetup
templatekeyword = eh.templatekeyword

eh.configitem(
    b'experimental',
    b'lfs.serve',
    default=True,
)
eh.configitem(
    b'experimental',
    b'lfs.user-agent',
    default=None,
)
eh.configitem(
    b'experimental',
    b'lfs.disableusercache',
    default=False,
)
eh.configitem(
    b'experimental',
    b'lfs.worker-enable',
    default=True,
)

eh.configitem(
    b'lfs',
    b'url',
    default=None,
)
eh.configitem(
    b'lfs',
    b'usercache',
    default=None,
)
# Deprecated
eh.configitem(
    b'lfs',
    b'threshold',
    default=None,
)
eh.configitem(
    b'lfs',
    b'track',
    default=b'none()',
)
eh.configitem(
    b'lfs',
    b'retry',
    default=5,
)

lfsprocessor = (
    wrapper.readfromstore,
    wrapper.writetostore,
    wrapper.bypasscheckhash,
)


def featuresetup(ui, supported):
    # don't die on seeing a repo with the lfs requirement
    supported |= {b'lfs'}


@eh.uisetup
def _uisetup(ui):
    localrepo.featuresetupfuncs.add(featuresetup)


@eh.reposetup
def _reposetup(ui, repo):
    # Nothing to do with a remote repo
    if not repo.local():
        return

    repo.svfs.lfslocalblobstore = blobstore.local(repo)
    repo.svfs.lfsremoteblobstore = blobstore.remote(repo)

    class lfsrepo(repo.__class__):
        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False, origctx=None):
            repo.svfs.options[b'lfstrack'] = _trackedmatcher(self)
            return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)

    repo.__class__ = lfsrepo

    if b'lfs' not in repo.requirements:

        def checkrequireslfs(ui, repo, **kwargs):
-            if b'lfs' in repo.requirements:
-                return 0
-
-            last = kwargs.get('node_last')
-            if last:
-                s = repo.set(b'%n:%n', bin(kwargs['node']), bin(last))
-            else:
-                s = repo.set(b'%n', bin(kwargs['node']))
-            match = repo._storenarrowmatch
-            for ctx in s:
-                # TODO: is there a way to just walk the files in the commit?
-                if any(
-                    ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
-                ):
-                    repo.requirements.add(b'lfs')
-                    repo.features.add(repository.REPO_FEATURE_LFS)
-                    scmutil.writereporequirements(repo)
-                    repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
-                    break
+            with repo.lock():
+                if b'lfs' in repo.requirements:
+                    return 0
+
+                last = kwargs.get('node_last')
+                if last:
+                    s = repo.set(b'%n:%n', bin(kwargs['node']), bin(last))
+                else:
+                    s = repo.set(b'%n', bin(kwargs['node']))
+                match = repo._storenarrowmatch
+                for ctx in s:
+                    # TODO: is there a way to just walk the files in the commit?
+                    if any(
+                        ctx[f].islfs()
+                        for f in ctx.files()
+                        if f in ctx and match(f)
+                    ):
+                        repo.requirements.add(b'lfs')
+                        repo.features.add(repository.REPO_FEATURE_LFS)
+                        scmutil.writereporequirements(repo)
+                        repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
+                        break

        ui.setconfig(b'hooks', b'commit.lfs', checkrequireslfs, b'lfs')
        ui.setconfig(
            b'hooks', b'pretxnchangegroup.lfs', checkrequireslfs, b'lfs'
        )
    else:
        repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)


def _trackedmatcher(repo):
    """Return a function (path, size) -> bool indicating whether or not to
    track a given file with lfs."""
    if not repo.wvfs.exists(b'.hglfs'):
        # No '.hglfs' in wdir. Fallback to config for now.
        trackspec = repo.ui.config(b'lfs', b'track')

        # deprecated config: lfs.threshold
        threshold = repo.ui.configbytes(b'lfs', b'threshold')
        if threshold:
            filesetlang.parse(trackspec)  # make sure syntax errors are confined
            trackspec = b"(%s) | size('>%d')" % (trackspec, threshold)

        return minifileset.compile(trackspec)

    data = repo.wvfs.tryread(b'.hglfs')
    if not data:
        return lambda p, s: False

    # Parse errors here will abort with a message that points to the .hglfs file
    # and line number.
    cfg = config.config()
    cfg.parse(b'.hglfs', data)

    try:
        rules = [
            (minifileset.compile(pattern), minifileset.compile(rule))
            for pattern, rule in cfg.items(b'track')
        ]
    except error.ParseError as e:
        # The original exception gives no indicator that the error is in the
        # .hglfs file, so add that.

        # TODO: See if the line number of the file can be made available.
        raise error.Abort(_(b'parse error in .hglfs: %s') % e)

    def _match(path, size):
        for pat, rule in rules:
            if pat(path, size):
                return rule(path, size)

        return False

    return _match


# Called by remotefilelog
def wrapfilelog(filelog):
    wrapfunction = extensions.wrapfunction

    wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
    wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
    wrapfunction(filelog, 'size', wrapper.filelogsize)


@eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
    opts = orig(ui, requirements, features)
    for name, module in extensions.extensions(ui):
        if module is sys.modules[__name__]:
            if revlog.REVIDX_EXTSTORED in opts[b'flagprocessors']:
                msg = (
                    _(b"cannot register multiple processors on flag '%#x'.")
                    % revlog.REVIDX_EXTSTORED
                )
                raise error.Abort(msg)

            opts[b'flagprocessors'][revlog.REVIDX_EXTSTORED] = lfsprocessor
            break

    return opts


@eh.extsetup
def _extsetup(ui):
    wrapfilelog(filelog.filelog)

    context.basefilectx.islfs = wrapper.filectxislfs

    scmutil.fileprefetchhooks.add(b'lfs', wrapper._prefetchfiles)

    # Make bundle choose changegroup3 instead of changegroup2. This affects
    # "hg bundle" command. Note: it does not cover all bundle formats like
    # "packed1". Using "packed1" with lfs will likely cause trouble.
    bundlecaches._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"


@eh.filesetpredicate(b'lfs()')
def lfsfileset(mctx, x):
    """File that uses LFS storage."""
    # i18n: "lfs" is a keyword
    filesetlang.getargs(x, 0, 0, _(b"lfs takes no arguments"))
    ctx = mctx.ctx

    def lfsfilep(f):
        return wrapper.pointerfromctx(ctx, f, removed=True) is not None

    return mctx.predicate(lfsfilep, predrepr=b'<lfs>')


@eh.templatekeyword(b'lfs_files', requires={b'ctx'})
def lfsfiles(context, mapping):
    """List of strings. All files modified, added, or removed by this
    changeset."""
    ctx = context.resource(mapping, b'ctx')

    pointers = wrapper.pointersfromctx(ctx, removed=True)  # {path: pointer}
    files = sorted(pointers.keys())

    def pointer(v):
        # In the file spec, version is first and the other keys are sorted.
        sortkeyfunc = lambda x: (x[0] != b'version', x)
        items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
        return util.sortdict(items)

    makemap = lambda v: {
        b'file': v,
        b'lfsoid': pointers[v].oid() if pointers[v] else None,
        b'lfspointer': templateutil.hybriddict(pointer(v)),
    }

    # TODO: make the separator ', '?
    f = templateutil._showcompatlist(context, mapping, b'lfs_file', files)
    return templateutil.hybrid(f, files, makemap, pycompat.identity)


@eh.command(
    b'debuglfsupload',
    [(b'r', b'rev', [], _(b'upload large files introduced by REV'))],
)
def debuglfsupload(ui, repo, **opts):
    """upload lfs blobs added by the working copy parent or given revisions"""
    revs = opts.get('rev', [])
    pointers = wrapper.extractpointers(repo, logcmdutil.revrange(repo, revs))
    wrapper.uploadblobs(repo, pointers)


@eh.wrapcommand(
    b'verify',
    opts=[(b'', b'no-lfs', None, _(b'skip missing lfs blob content'))],
)
def verify(orig, ui, repo, **opts):
    skipflags = repo.ui.configint(b'verify', b'skipflags')
    no_lfs = opts.pop('no_lfs')

    if skipflags:
        # --lfs overrides the config bit, if set.
        if no_lfs is False:
            skipflags &= ~repository.REVISION_FLAG_EXTSTORED
    else:
        skipflags = 0

    if no_lfs is True:
        skipflags |= repository.REVISION_FLAG_EXTSTORED

    with ui.configoverride({(b'verify', b'skipflags'): skipflags}):
        return orig(ui, repo, **opts)
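``_trackedmatcher`` above evaluates the ``[track]`` rules from a versioned
``.hglfs`` file strictly first-match-wins. The same machinery can be exercised
directly; the rule list below is made up for the example::

    from mercurial import minifileset

    # compile (pattern, predicate) pairs the way _trackedmatcher does
    rules = [
        (minifileset.compile(b'**.py'), minifileset.compile(b'none()')),
        (minifileset.compile(b'**'), minifileset.compile(b'size(">10MB")')),
    ]

    def track(path, size):
        # the first pattern matching the path decides; later rules are
        # never consulted, exactly as in _match() above
        for pat, rule in rules:
            if pat(path, size):
                return rule(path, size)
        return False

With these rules, ``track(b'big.py', 50 * 1024 * 1024)`` is False: the
``**.py`` rule wins before the size catchall is ever reached.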
@@ -1,76 +1,76 @@
# narrowdirstate.py - extensions to mercurial dirstate to support narrow clones
#
# Copyright 2017 Google, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

from mercurial.i18n import _
from mercurial import error


def wrapdirstate(repo, dirstate):
    """Add narrow spec dirstate ignore, block changes outside narrow spec."""

    def _editfunc(fn):
        def _wrapper(self, *args, **kwargs):
            narrowmatch = repo.narrowmatch()
            for f in args:
                if f is not None and not narrowmatch(f) and f not in self:
                    raise error.Abort(
                        _(
                            b"cannot track '%s' - it is outside "
                            + b"the narrow clone"
                        )
                        % f
                    )
            return fn(self, *args, **kwargs)

        return _wrapper

    class narrowdirstate(dirstate.__class__):
        # Prevent adding/editing/copying/deleting files that are outside the
        # sparse checkout
        @_editfunc
        def normal(self, *args, **kwargs):
            return super(narrowdirstate, self).normal(*args, **kwargs)

        @_editfunc
-        def set_tracked(self, *args):
-            return super(narrowdirstate, self).set_tracked(*args)
+        def set_tracked(self, *args, **kwargs):
+            return super(narrowdirstate, self).set_tracked(*args, **kwargs)

        @_editfunc
        def set_untracked(self, *args):
            return super(narrowdirstate, self).set_untracked(*args)

        @_editfunc
        def add(self, *args):
            return super(narrowdirstate, self).add(*args)

        @_editfunc
        def normallookup(self, *args):
            return super(narrowdirstate, self).normallookup(*args)

        @_editfunc
        def copy(self, *args):
            return super(narrowdirstate, self).copy(*args)

        @_editfunc
        def remove(self, *args):
            return super(narrowdirstate, self).remove(*args)

        @_editfunc
        def merge(self, *args):
            return super(narrowdirstate, self).merge(*args)

        def rebuild(self, parent, allfiles, changedfiles=None):
            if changedfiles is None:
                # Rebuilding entire dirstate, let's filter allfiles to match the
                # narrowspec.
                allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
            super(narrowdirstate, self).rebuild(parent, allfiles, changedfiles)

    dirstate.__class__ = narrowdirstate
    return dirstate
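The only functional change in this file is that ``set_tracked`` now forwards
``**kwargs``: a wrapper that accepts only ``*args`` silently drops any keyword
argument the wrapped method later grows. A generic sketch of the safe
forwarding shape (``audited`` is a made-up decorator, not part of Mercurial)::

    import functools

    def audited(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            # forward positionals and keywords alike, so callers that
            # pass keyword arguments keep working as signatures evolve
            return fn(self, *args, **kwargs)

        return wrapper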
@@ -1,656 +1,659 @@
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''hooks for sending email push notifications
8 '''hooks for sending email push notifications
9
9
10 This extension implements hooks to send email notifications when
10 This extension implements hooks to send email notifications when
11 changesets are sent from or received by the local repository.
11 changesets are sent from or received by the local repository.
12
12
13 First, enable the extension as explained in :hg:`help extensions`, and
13 First, enable the extension as explained in :hg:`help extensions`, and
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 are run when changesets are received, while ``outgoing`` hooks are for
15 are run when changesets are received, while ``outgoing`` hooks are for
16 changesets sent to another repository::
16 changesets sent to another repository::
17
17
18 [hooks]
18 [hooks]
19 # one email for each incoming changeset
19 # one email for each incoming changeset
20 incoming.notify = python:hgext.notify.hook
20 incoming.notify = python:hgext.notify.hook
21 # one email for all incoming changesets
21 # one email for all incoming changesets
22 changegroup.notify = python:hgext.notify.hook
22 changegroup.notify = python:hgext.notify.hook
23
23
24 # one email for all outgoing changesets
24 # one email for all outgoing changesets
25 outgoing.notify = python:hgext.notify.hook
25 outgoing.notify = python:hgext.notify.hook
26
26
27 This registers the hooks. To enable notification, subscribers must
27 This registers the hooks. To enable notification, subscribers must
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
29 repositories to a given recipient. The ``[reposubs]`` section maps
29 repositories to a given recipient. The ``[reposubs]`` section maps
30 multiple recipients to a single repository::
30 multiple recipients to a single repository::
31
31
32 [usersubs]
32 [usersubs]
33 # key is subscriber email, value is a comma-separated list of repo patterns
33 # key is subscriber email, value is a comma-separated list of repo patterns
34 user@host = pattern
34 user@host = pattern
35
35
36 [reposubs]
36 [reposubs]
37 # key is repo pattern, value is a comma-separated list of subscriber emails
37 # key is repo pattern, value is a comma-separated list of subscriber emails
38 pattern = user@host
38 pattern = user@host
39
39
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
41 optionally combined with a revset expression. A revset expression, if
41 optionally combined with a revset expression. A revset expression, if
42 present, is separated from the glob by a hash. Example::
42 present, is separated from the glob by a hash. Example::
43
43
44 [reposubs]
44 [reposubs]
45 */widgets#branch(release) = qa-team@example.com
45 */widgets#branch(release) = qa-team@example.com
46
46
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
48 branch triggers a notification in any repository ending in ``widgets``.
48 branch triggers a notification in any repository ending in ``widgets``.
49
49
50 In order to place them under direct user management, ``[usersubs]`` and
50 In order to place them under direct user management, ``[usersubs]`` and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
52 incorporated by reference::
52 incorporated by reference::
53
53
54 [notify]
54 [notify]
55 config = /path/to/subscriptionsfile
55 config = /path/to/subscriptionsfile
56
56
57 Notifications will not be sent until the ``notify.test`` value is set
57 Notifications will not be sent until the ``notify.test`` value is set
58 to ``False``; see below.
58 to ``False``; see below.
59
59
60 Notifications content can be tweaked with the following configuration entries:
60 Notifications content can be tweaked with the following configuration entries:
61
61
62 notify.test
62 notify.test
63 If ``True``, print messages to stdout instead of sending them. Default: True.
63 If ``True``, print messages to stdout instead of sending them. Default: True.
64
64
65 notify.sources
65 notify.sources
66 Space-separated list of change sources. Notifications are activated only
66 Space-separated list of change sources. Notifications are activated only
67 when a changeset's source is in this list. Sources may be:
67 when a changeset's source is in this list. Sources may be:
68
68
69 :``serve``: changesets received via http or ssh
69 :``serve``: changesets received via http or ssh
70 :``pull``: changesets received via ``hg pull``
70 :``pull``: changesets received via ``hg pull``
71 :``unbundle``: changesets received via ``hg unbundle``
71 :``unbundle``: changesets received via ``hg unbundle``
72 :``push``: changesets sent or received via ``hg push``
72 :``push``: changesets sent or received via ``hg push``
73 :``bundle``: changesets sent via ``hg unbundle``
73 :``bundle``: changesets sent via ``hg unbundle``
74
74
75 Default: serve.
75 Default: serve.
76
76
77 notify.strip
77 notify.strip
78 Number of leading slashes to strip from url paths. By default, notifications
78 Number of leading slashes to strip from url paths. By default, notifications
79 reference repositories with their absolute path. ``notify.strip`` lets you
79 reference repositories with their absolute path. ``notify.strip`` lets you
80 turn them into relative paths. For example, ``notify.strip=3`` will change
80 turn them into relative paths. For example, ``notify.strip=3`` will change
81 ``/long/path/repository`` into ``repository``. Default: 0.
81 ``/long/path/repository`` into ``repository``. Default: 0.
82
82
83 notify.domain
83 notify.domain
84 Default email domain for sender or recipients with no explicit domain.
84 Default email domain for sender or recipients with no explicit domain.
85 It is also used for the domain part of the ``Message-Id`` when using
85 It is also used for the domain part of the ``Message-Id`` when using
86 ``notify.messageidseed``.
86 ``notify.messageidseed``.
87
87
88 notify.messageidseed
88 notify.messageidseed
89 Create deterministic ``Message-Id`` headers for the mails based on the seed
89 Create deterministic ``Message-Id`` headers for the mails based on the seed
90 and the revision identifier of the first commit in the changeset.
90 and the revision identifier of the first commit in the changeset.
91
91
92 notify.style
92 notify.style
93 Style file to use when formatting emails.
93 Style file to use when formatting emails.
94
94
95 notify.template
95 notify.template
96 Template to use when formatting emails.
96 Template to use when formatting emails.
97
97
98 notify.incoming
98 notify.incoming
99 Template to use when run as an incoming hook, overriding ``notify.template``.
99 Template to use when run as an incoming hook, overriding ``notify.template``.
100
100
101 notify.outgoing
101 notify.outgoing
102 Template to use when run as an outgoing hook, overriding ``notify.template``.
102 Template to use when run as an outgoing hook, overriding ``notify.template``.
103
103
104 notify.changegroup
104 notify.changegroup
105 Template to use when running as a changegroup hook, overriding
105 Template to use when running as a changegroup hook, overriding
106 ``notify.template``.
106 ``notify.template``.
107
107
108 notify.maxdiff
108 notify.maxdiff
109 Maximum number of diff lines to include in notification email. Set to 0
109 Maximum number of diff lines to include in notification email. Set to 0
110 to disable the diff, or -1 to include all of it. Default: 300.
110 to disable the diff, or -1 to include all of it. Default: 300.
111
111
112 notify.maxdiffstat
112 notify.maxdiffstat
113 Maximum number of diffstat lines to include in notification email. Set to -1
113 Maximum number of diffstat lines to include in notification email. Set to -1
114 to include all of it. Default: -1.
114 to include all of it. Default: -1.
115
115
116 notify.maxsubject
116 notify.maxsubject
117 Maximum number of characters in email's subject line. Default: 67.
117 Maximum number of characters in email's subject line. Default: 67.
118
118
119 notify.diffstat
119 notify.diffstat
120 Set to True to include a diffstat before diff content. Default: True.
120 Set to True to include a diffstat before diff content. Default: True.
121
121
122 notify.showfunc
122 notify.showfunc
123 If set, override ``diff.showfunc`` for the diff content. Default: None.
123 If set, override ``diff.showfunc`` for the diff content. Default: None.
124
124
125 notify.merge
125 notify.merge
126 If True, send notifications for merge changesets. Default: True.
126 If True, send notifications for merge changesets. Default: True.
127
127
128 notify.mbox
128 notify.mbox
129 If set, append mails to this mbox file instead of sending. Default: None.
129 If set, append mails to this mbox file instead of sending. Default: None.
130
130
131 notify.fromauthor
131 notify.fromauthor
132 If set, use the committer of the first changeset in a changegroup for
132 If set, use the committer of the first changeset in a changegroup for
133 the "From" field of the notification mail. If not set, take the user
133 the "From" field of the notification mail. If not set, take the user
134 from the pushing repo. Default: False.
134 from the pushing repo. Default: False.
135
135
136 notify.reply-to-predecessor (EXPERIMENTAL)
136 notify.reply-to-predecessor (EXPERIMENTAL)
137 If set and the changeset has a predecessor in the repository, try to thread
137 If set and the changeset has a predecessor in the repository, try to thread
138 the notification mail with the predecessor. This adds the "In-Reply-To" header
138 the notification mail with the predecessor. This adds the "In-Reply-To" header
139 to the notification mail with a reference to the predecessor with the smallest
139 to the notification mail with a reference to the predecessor with the smallest
140 revision number. Mail threads can still be torn, especially when changesets
140 revision number. Mail threads can still be torn, especially when changesets
141 are folded.
141 are folded.
142
142
143 This option must be used in combination with ``notify.messageidseed``.
143 This option must be used in combination with ``notify.messageidseed``.
144
144
145 If set, the following entries will also be used to customize the
145 If set, the following entries will also be used to customize the
146 notifications:
146 notifications:
147
147
148 email.from
148 email.from
149 Email ``From`` address to use if none can be found in the generated
149 Email ``From`` address to use if none can be found in the generated
150 email content.
150 email content.
151
151
152 web.baseurl
152 web.baseurl
153 Root repository URL to combine with repository paths when making
153 Root repository URL to combine with repository paths when making
154 references. See also ``notify.strip``.
154 references. See also ``notify.strip``.
155
155
156 '''
156 '''
157 from __future__ import absolute_import
157 from __future__ import absolute_import
158
158
159 import email.errors as emailerrors
159 import email.errors as emailerrors
160 import email.utils as emailutils
160 import email.utils as emailutils
161 import fnmatch
161 import fnmatch
162 import hashlib
162 import hashlib
163 import socket
163 import socket
164 import time
164 import time
165
165
166 from mercurial.i18n import _
166 from mercurial.i18n import _
167 from mercurial import (
167 from mercurial import (
168 encoding,
168 encoding,
169 error,
169 error,
170 logcmdutil,
170 logcmdutil,
171 mail,
171 mail,
172 obsutil,
172 obsutil,
173 patch,
173 patch,
174 pycompat,
174 pycompat,
175 registrar,
175 registrar,
176 util,
176 util,
177 )
177 )
178 from mercurial.utils import (
178 from mercurial.utils import (
179 dateutil,
179 dateutil,
180 stringutil,
180 stringutil,
181 )
181 )
182
182
183 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
183 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
184 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
184 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
185 # be specifying the version(s) of Mercurial they are tested with, or
185 # be specifying the version(s) of Mercurial they are tested with, or
186 # leave the attribute unspecified.
186 # leave the attribute unspecified.
187 testedwith = b'ships-with-hg-core'
187 testedwith = b'ships-with-hg-core'
188
188
189 configtable = {}
189 configtable = {}
190 configitem = registrar.configitem(configtable)
190 configitem = registrar.configitem(configtable)
191
191
192 configitem(
192 configitem(
193 b'notify',
193 b'notify',
194 b'changegroup',
194 b'changegroup',
195 default=None,
195 default=None,
196 )
196 )
197 configitem(
197 configitem(
198 b'notify',
198 b'notify',
199 b'config',
199 b'config',
200 default=None,
200 default=None,
201 )
201 )
202 configitem(
202 configitem(
203 b'notify',
203 b'notify',
204 b'diffstat',
204 b'diffstat',
205 default=True,
205 default=True,
206 )
206 )
207 configitem(
207 configitem(
208 b'notify',
208 b'notify',
209 b'domain',
209 b'domain',
210 default=None,
210 default=None,
211 )
211 )
212 configitem(
212 configitem(
213 b'notify',
213 b'notify',
214 b'messageidseed',
214 b'messageidseed',
215 default=None,
215 default=None,
216 )
216 )
217 configitem(
217 configitem(
218 b'notify',
218 b'notify',
219 b'fromauthor',
219 b'fromauthor',
220 default=None,
220 default=None,
221 )
221 )
222 configitem(
222 configitem(
223 b'notify',
223 b'notify',
224 b'incoming',
224 b'incoming',
225 default=None,
225 default=None,
226 )
226 )
227 configitem(
227 configitem(
228 b'notify',
228 b'notify',
229 b'maxdiff',
229 b'maxdiff',
230 default=300,
230 default=300,
231 )
231 )
232 configitem(
232 configitem(
233 b'notify',
233 b'notify',
234 b'maxdiffstat',
234 b'maxdiffstat',
235 default=-1,
235 default=-1,
236 )
236 )
237 configitem(
237 configitem(
238 b'notify',
238 b'notify',
239 b'maxsubject',
239 b'maxsubject',
240 default=67,
240 default=67,
241 )
241 )
242 configitem(
242 configitem(
243 b'notify',
243 b'notify',
244 b'mbox',
244 b'mbox',
245 default=None,
245 default=None,
246 )
246 )
247 configitem(
247 configitem(
248 b'notify',
248 b'notify',
249 b'merge',
249 b'merge',
250 default=True,
250 default=True,
251 )
251 )
252 configitem(
252 configitem(
253 b'notify',
253 b'notify',
254 b'outgoing',
254 b'outgoing',
255 default=None,
255 default=None,
256 )
256 )
257 configitem(
257 configitem(
258 b'notify',
258 b'notify',
259 b'reply-to-predecessor',
259 b'reply-to-predecessor',
260 default=False,
260 default=False,
261 )
261 )
262 configitem(
262 configitem(
263 b'notify',
263 b'notify',
264 b'sources',
264 b'sources',
265 default=b'serve',
265 default=b'serve',
266 )
266 )
267 configitem(
267 configitem(
268 b'notify',
268 b'notify',
269 b'showfunc',
269 b'showfunc',
270 default=None,
270 default=None,
271 )
271 )
272 configitem(
272 configitem(
273 b'notify',
273 b'notify',
274 b'strip',
274 b'strip',
275 default=0,
275 default=0,
276 )
276 )
277 configitem(
277 configitem(
278 b'notify',
278 b'notify',
279 b'style',
279 b'style',
280 default=None,
280 default=None,
281 )
281 )
282 configitem(
282 configitem(
283 b'notify',
283 b'notify',
284 b'template',
284 b'template',
285 default=None,
285 default=None,
286 )
286 )
287 configitem(
287 configitem(
288 b'notify',
288 b'notify',
289 b'test',
289 b'test',
290 default=True,
290 default=True,
291 )
291 )
292
292
# template for a single changeset can include email headers.
single_template = b'''
Subject: changeset in {webroot}: {desc|firstline|strip}
From: {author}

changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
description:
\t{desc|tabindent|strip}
'''.lstrip()

# template for multiple changesets should not contain email headers,
# because only the first set of headers will be used and the result
# will look strange.
multiple_template = b'''
changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
summary: {desc|firstline}
'''

deftemplates = {
    b'changegroup': multiple_template,
}


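# Illustrative override (assumed hgrc, not a shipped default): the generic
# template can be replaced site-wide using the usual changeset template
# keywords plus the extras supplied by notifier.node() below (baseurl,
# root, webroot), e.g.
#
#   [notify]
#   template = Subject: {webroot}: {desc|firstline|strip}\n\nchangeset {node|short} in {root}\n
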
class notifier(object):
    '''email notification class.'''

    def __init__(self, ui, repo, hooktype):
        self.ui = ui
        cfg = self.ui.config(b'notify', b'config')
        if cfg:
            self.ui.readconfig(cfg, sections=[b'usersubs', b'reposubs'])
        self.repo = repo
        self.stripcount = int(self.ui.config(b'notify', b'strip'))
        self.root = self.strip(self.repo.root)
        self.domain = self.ui.config(b'notify', b'domain')
        self.mbox = self.ui.config(b'notify', b'mbox')
        self.test = self.ui.configbool(b'notify', b'test')
        self.charsets = mail._charsets(self.ui)
        self.subs = self.subscribers()
        self.merge = self.ui.configbool(b'notify', b'merge')
        self.showfunc = self.ui.configbool(b'notify', b'showfunc')
        self.messageidseed = self.ui.config(b'notify', b'messageidseed')
        self.reply = self.ui.configbool(b'notify', b'reply-to-predecessor')

        if self.reply and not self.messageidseed:
            raise error.Abort(
                _(
                    b'notify.reply-to-predecessor used without '
                    b'notify.messageidseed'
                )
            )

        if self.showfunc is None:
            self.showfunc = self.ui.configbool(b'diff', b'showfunc')

        mapfile = None
        template = self.ui.config(b'notify', hooktype) or self.ui.config(
            b'notify', b'template'
        )
        if not template:
            mapfile = self.ui.config(b'notify', b'style')
        if not mapfile and not template:
            template = deftemplates.get(hooktype) or single_template
        spec = logcmdutil.templatespec(template, mapfile)
        self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec)

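    # Template resolution in __init__ above: a hook-specific option
    # ([notify] <hooktype>) wins over the generic [notify] template, which
    # wins over a [notify] style map file; if none is set, the built-in
    # defaults apply ('changegroup' falls back to multiple_template via
    # deftemplates, everything else to single_template).
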
    def strip(self, path):
        '''strip leading slashes from local path, turn into web-safe path.'''

        path = util.pconvert(path)
        count = self.stripcount
        while count > 0:
            c = path.find(b'/')
            if c == -1:
                break
            path = path[c + 1 :]
            count -= 1
        return path

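    # Example for strip() above (assumed path): with notify.strip=2 a root
    # of b'/var/hg/repos/myrepo' becomes b'var/hg/repos/myrepo' after the
    # first iteration (the leading slash counts as one component) and
    # b'hg/repos/myrepo' after the second; each iteration drops everything
    # up to and including the next b'/'.
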
    def fixmail(self, addr):
        '''try to clean up email addresses.'''

        addr = stringutil.email(addr.strip())
        if self.domain:
            a = addr.find(b'@localhost')
            if a != -1:
                addr = addr[:a]
            if b'@' not in addr:
                return addr + b'@' + self.domain
        return addr

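    # Example for fixmail() above (assumed addresses): with
    # notify.domain=example.com, b'John Doe <jdoe@localhost>' is reduced to
    # b'jdoe' and completed to b'jdoe@example.com'; an address that already
    # carries a real domain is returned unchanged.
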
    def subscribers(self):
        '''return list of email addresses of subscribers to this repo.'''
        subs = set()
        for user, pats in self.ui.configitems(b'usersubs'):
            for pat in pats.split(b','):
                if b'#' in pat:
                    pat, revs = pat.split(b'#', 1)
                else:
                    revs = None
                if fnmatch.fnmatch(self.repo.root, pat.strip()):
                    subs.add((self.fixmail(user), revs))
        for pat, users in self.ui.configitems(b'reposubs'):
            if b'#' in pat:
                pat, revs = pat.split(b'#', 1)
            else:
                revs = None
            if fnmatch.fnmatch(self.repo.root, pat):
                for user in users.split(b','):
                    subs.add((self.fixmail(user), revs))
        return [
            (mail.addressencode(self.ui, s, self.charsets, self.test), r)
            for s, r in sorted(subs)
        ]

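    # Illustrative subscription config (assumed patterns and addresses):
    # in [usersubs] the key is the subscriber and the value a comma-separated
    # list of glob patterns over repository roots; [reposubs] is the reverse.
    # An optional #revset suffix on a pattern restricts notifications to
    # matching revisions:
    #
    #   [usersubs]
    #   alice@example.com = /var/hg/repos/*
    #
    #   [reposubs]
    #   /var/hg/repos/myrepo#branch(stable) = bob@example.com, carol@example.com
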
    def node(self, ctx, **props):
        '''format one changeset, unless it is a suppressed merge.'''
        if not self.merge and len(ctx.parents()) > 1:
            return False
        self.t.show(
            ctx,
            changes=ctx.changeset(),
            baseurl=self.ui.config(b'web', b'baseurl'),
            root=self.repo.root,
            webroot=self.root,
            **props
        )
        return True

    def skipsource(self, source):
        '''true if incoming changes from this source should be skipped.'''
        ok_sources = self.ui.config(b'notify', b'sources').split()
        return source not in ok_sources

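    # Illustrative: with the default notify.sources of b'serve', changesets
    # arriving via a local b'push', b'pull' or b'unbundle' are skipped;
    # e.g. "sources = serve push unbundle" widens notification to those
    # transports as well.
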
    def send(self, ctx, count, data):
        '''send message.'''

        # Select subscribers by revset
        subs = set()
        for sub, spec in self.subs:
            if spec is None:
                subs.add(sub)
                continue
            try:
                revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
            except error.RepoLookupError:
                continue
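            # The try/except above (the change carried by this diff)
            # tolerates revset specs that reference revisions unknown to
            # this repository, so one unresolvable subscription cannot
            # abort notification for every other subscriber.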
            if len(revs):
                subs.add(sub)
                continue
        if len(subs) == 0:
            self.ui.debug(
                b'notify: no subscribers to selected repo and revset\n'
            )
            return

        try:
            msg = mail.parsebytes(data)
        except emailerrors.MessageParseError as inst:
            raise error.Abort(inst)

        # store sender and subject
        sender = msg['From']
        subject = msg['Subject']
        if sender is not None:
            sender = mail.headdecode(sender)
        if subject is not None:
            subject = mail.headdecode(subject)
        del msg['From'], msg['Subject']

        if not msg.is_multipart():
            # create fresh mime message from scratch
            # (multipart templates must take care of this themselves)
            headers = msg.items()
            payload = msg.get_payload(decode=pycompat.ispy3)
            # for notification prefer readability over data precision
            msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
            # reinstate custom headers
            for k, v in headers:
                msg[k] = v

        msg['Date'] = encoding.strfromlocal(
            dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2")
        )

        # try to make subject line exist and be useful
        if not subject:
            if count > 1:
                subject = _(b'%s: %d new changesets') % (self.root, count)
            else:
                s = ctx.description().lstrip().split(b'\n', 1)[0].rstrip()
                subject = b'%s: %s' % (self.root, s)
        maxsubject = int(self.ui.config(b'notify', b'maxsubject'))
        if maxsubject:
            subject = stringutil.ellipsis(subject, maxsubject)
        msg['Subject'] = mail.headencode(
            self.ui, subject, self.charsets, self.test
        )

        # try to make message have proper sender
        if not sender:
            sender = self.ui.config(b'email', b'from') or self.ui.username()
        if b'@' not in sender or b'@localhost' in sender:
            sender = self.fixmail(sender)
        msg['From'] = mail.addressencode(
            self.ui, sender, self.charsets, self.test
        )

        msg['X-Hg-Notification'] = 'changeset %s' % ctx
        if not msg['Message-Id']:
            msg['Message-Id'] = messageid(ctx, self.domain, self.messageidseed)
        if self.reply:
            unfi = self.repo.unfiltered()
            has_node = unfi.changelog.index.has_node
            predecessors = [
                unfi[ctx2]
                for ctx2 in obsutil.allpredecessors(unfi.obsstore, [ctx.node()])
                if ctx2 != ctx.node() and has_node(ctx2)
            ]
            if predecessors:
                # There is at least one predecessor, so which to pick?
                # Ideally, there is a unique root because changesets have
                # been evolved/rebased one step at a time. In this case,
                # just picking the oldest known changeset provides a stable
                # base. It doesn't help when changesets are folded. Any
                # better solution would require storing more information
                # in the repository.
                pred = min(predecessors, key=lambda ctx: ctx.rev())
                msg['In-Reply-To'] = messageid(
                    pred, self.domain, self.messageidseed
                )
        msg['To'] = ', '.join(sorted(subs))

        msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
        if self.test:
            self.ui.write(msgtext)
            if not msgtext.endswith(b'\n'):
                self.ui.write(b'\n')
        else:
            self.ui.status(
                _(b'notify: sending %d subscribers %d changes\n')
                % (len(subs), count)
            )
            mail.sendmail(
                self.ui,
                emailutils.parseaddr(msg['From'])[1],
                subs,
                msgtext,
                mbox=self.mbox,
            )

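    # Note: with notify.test=True (the registered default) the fully
    # formatted message is written to the ui instead of being handed to
    # mail.sendmail, so a deployment must set test=False to actually
    # deliver mail.
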
    def diff(self, ctx, ref=None):

        maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
        prev = ctx.p1().node()
        if ref:
            ref = ref.node()
        else:
            ref = ctx.node()
        diffopts = patch.diffallopts(self.ui)
        diffopts.showfunc = self.showfunc
        chunks = patch.diff(self.repo, prev, ref, opts=diffopts)
        difflines = b''.join(chunks).splitlines()

        if self.ui.configbool(b'notify', b'diffstat'):
            maxdiffstat = int(self.ui.config(b'notify', b'maxdiffstat'))
            s = patch.diffstat(difflines)
            # s may be nil, don't include the header if it is
            if s:
                if maxdiffstat >= 0 and s.count(b"\n") > maxdiffstat + 1:
                    s = s.split(b"\n")
                    msg = _(b'\ndiffstat (truncated from %d to %d lines):\n\n')
                    self.ui.write(msg % (len(s) - 2, maxdiffstat))
                    self.ui.write(b"\n".join(s[:maxdiffstat] + s[-2:]))
                else:
                    self.ui.write(_(b'\ndiffstat:\n\n%s') % s)

        if maxdiff == 0:
            return
        elif maxdiff > 0 and len(difflines) > maxdiff:
            msg = _(b'\ndiffs (truncated from %d to %d lines):\n\n')
            self.ui.write(msg % (len(difflines), maxdiff))
            difflines = difflines[:maxdiff]
        elif difflines:
            self.ui.write(_(b'\ndiffs (%d lines):\n\n') % len(difflines))

        self.ui.write(b"\n".join(difflines))


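# Hook wiring as documented for this extension: 'incoming' sends one mail
# per changeset, while 'changegroup' and 'outgoing' send one mail per batch:
#
#   [hooks]
#   incoming.notify = python:hgext.notify.hook
#   # or, batched:
#   changegroup.notify = python:hgext.notify.hook
#   outgoing.notify = python:hgext.notify.hook
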
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    """send email notifications to interested subscribers.

    if used as changegroup hook, send one email for all changesets in
    changegroup. else send one email per changeset."""

    n = notifier(ui, repo, hooktype)
    ctx = repo.unfiltered()[node]

    if not n.subs:
        ui.debug(b'notify: no subscribers to repository %s\n' % n.root)
        return
    if n.skipsource(source):
        ui.debug(b'notify: changes have source "%s" - skipping\n' % source)
        return

    ui.pushbuffer()
    data = b''
    count = 0
    author = b''
    if hooktype == b'changegroup' or hooktype == b'outgoing':
        for rev in repo.changelog.revs(start=ctx.rev()):
            if n.node(repo[rev]):
                count += 1
                if not author:
                    author = repo[rev].user()
            else:
                data += ui.popbuffer()
                ui.note(
                    _(b'notify: suppressing notification for merge %d:%s\n')
                    % (rev, repo[rev].hex()[:12])
                )
                ui.pushbuffer()
        if count:
            n.diff(ctx, repo[b'tip'])
    elif ctx.rev() in repo:
        if not n.node(ctx):
            ui.popbuffer()
            ui.note(
                _(b'notify: suppressing notification for merge %d:%s\n')
                % (ctx.rev(), ctx.hex()[:12])
            )
            return
        count += 1
        n.diff(ctx)
        if not author:
            author = ctx.user()

    data += ui.popbuffer()
    fromauthor = ui.config(b'notify', b'fromauthor')
    if author and fromauthor:
        data = b'\n'.join([b'From: %s' % author, data])

    if count:
        n.send(ctx, count, data)


def messageid(ctx, domain, messageidseed):
    if domain and messageidseed:
        host = domain
    else:
        host = encoding.strtolocal(socket.getfqdn())
    if messageidseed:
        messagehash = hashlib.sha512(ctx.hex() + messageidseed)
        messageid = b'<hg.%s@%s>' % (
            pycompat.sysbytes(messagehash.hexdigest()[:64]),
            host,
        )
    else:
        messageid = b'<hg.%s.%d.%d@%s>' % (
            ctx,
            int(time.time()),
            hash(ctx.repo().root),
            host,
        )
    return encoding.strfromlocal(messageid)
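
# Illustrative behaviour of messageid() above: with a messageidseed the id
# is a pure function of the changeset and the seed, e.g.
# b'<hg.<first 64 hex digits of sha512(ctx.hex() + seed)>@example.com>', so
# re-notifying the same changeset reuses the same id and the In-Reply-To
# threading in send() stays stable across runs; without a seed the id
# embeds a timestamp and is unique per invocation.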
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from tests/badserverext.py to tests/testlib/badserverext.py
NO CONTENT: file renamed from tests/badserverext.py to tests/testlib/badserverext.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff