branching: merge default into stable for 6.1 freeze
Raphaël Gomès
r49650:c00d3ce4 merge 6.1rc0 stable

The requested changes are too big and content was truncated. Fifteen new files (mode 100644) are omitted from the diff below.
@@ -1,576 +1,576 @@
1 1 # cli.py - Command line interface for automation
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import argparse
11 11 import concurrent.futures as futures
12 12 import os
13 13 import pathlib
14 14 import time
15 15
16 16 from . import (
17 17 aws,
18 18 HGAutomation,
19 19 linux,
20 20 try_server,
21 21 windows,
22 22 )
23 23
24 24
25 25 SOURCE_ROOT = pathlib.Path(
26 26 os.path.abspath(__file__)
27 27 ).parent.parent.parent.parent
28 28 DIST_PATH = SOURCE_ROOT / 'dist'
29 29
30 30
31 31 def bootstrap_linux_dev(
32 32 hga: HGAutomation, aws_region, distros=None, parallel=False
33 33 ):
34 34 c = hga.aws_connection(aws_region)
35 35
36 36 if distros:
37 37 distros = distros.split(',')
38 38 else:
39 39 distros = sorted(linux.DISTROS)
40 40
41 41 # TODO There is a wonky interaction involving KeyboardInterrupt whereby
42 42 # the context manager that is supposed to terminate the temporary EC2
43 43 # instance doesn't run. Until we fix this, make parallel building opt-in
44 44 # so we don't orphan instances.
45 45 if parallel:
46 46 fs = []
47 47
48 48 with futures.ThreadPoolExecutor(len(distros)) as e:
49 49 for distro in distros:
50 50 fs.append(e.submit(aws.ensure_linux_dev_ami, c, distro=distro))
51 51
52 52 for f in fs:
53 53 f.result()
54 54 else:
55 55 for distro in distros:
56 56 aws.ensure_linux_dev_ami(c, distro=distro)
57 57
58 58
59 59 def bootstrap_windows_dev(hga: HGAutomation, aws_region, base_image_name):
60 60 c = hga.aws_connection(aws_region)
61 61 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
62 62 print('Windows development AMI available as %s' % image.id)
63 63
64 64
65 65 def build_inno(
66 66 hga: HGAutomation,
67 67 aws_region,
68 68 python_version,
69 69 arch,
70 70 revision,
71 71 version,
72 72 base_image_name,
73 73 ):
74 74 c = hga.aws_connection(aws_region)
75 75 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
76 76 DIST_PATH.mkdir(exist_ok=True)
77 77
78 78 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
79 79 instance = insts[0]
80 80
81 81 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
82 82
83 83 for py_version in python_version:
84 84 for a in arch:
85 85 windows.build_inno_installer(
86 86 instance.winrm_client,
87 87 py_version,
88 88 a,
89 89 DIST_PATH,
90 90 version=version,
91 91 )
92 92
93 93
94 94 def build_wix(
95 95 hga: HGAutomation,
96 96 aws_region,
97 97 python_version,
98 98 arch,
99 99 revision,
100 100 version,
101 101 base_image_name,
102 102 ):
103 103 c = hga.aws_connection(aws_region)
104 104 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
105 105 DIST_PATH.mkdir(exist_ok=True)
106 106
107 107 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
108 108 instance = insts[0]
109 109
110 110 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
111 111
112 112 for py_version in python_version:
113 113 for a in arch:
114 114 windows.build_wix_installer(
115 115 instance.winrm_client,
116 116 py_version,
117 117 a,
118 118 DIST_PATH,
119 119 version=version,
120 120 )
121 121
122 122
123 123 def build_windows_wheel(
124 124 hga: HGAutomation,
125 125 aws_region,
126 126 python_version,
127 127 arch,
128 128 revision,
129 129 base_image_name,
130 130 ):
131 131 c = hga.aws_connection(aws_region)
132 132 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
133 133 DIST_PATH.mkdir(exist_ok=True)
134 134
135 135 with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts:
136 136 instance = insts[0]
137 137
138 138 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
139 139
140 140 for py_version in python_version:
141 141 for a in arch:
142 142 windows.build_wheel(
143 143 instance.winrm_client, py_version, a, DIST_PATH
144 144 )
145 145
146 146
147 147 def build_all_windows_packages(
148 148 hga: HGAutomation, aws_region, revision, version, base_image_name
149 149 ):
150 150 c = hga.aws_connection(aws_region)
151 151 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
152 152 DIST_PATH.mkdir(exist_ok=True)
153 153
154 154 with aws.temporary_windows_dev_instances(c, image, 'm6i.large') as insts:
155 155 instance = insts[0]
156 156
157 157 winrm_client = instance.winrm_client
158 158
159 159 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
160 160
161 for py_version in ("2.7", "3.7", "3.8", "3.9"):
161 for py_version in ("2.7", "3.7", "3.8", "3.9", "3.10"):
162 162 for arch in ("x86", "x64"):
163 163 windows.purge_hg(winrm_client)
164 164 windows.build_wheel(
165 165 winrm_client,
166 166 python_version=py_version,
167 167 arch=arch,
168 168 dest_path=DIST_PATH,
169 169 )
170 170
171 171 for py_version in (2, 3):
172 172 for arch in ('x86', 'x64'):
173 173 windows.purge_hg(winrm_client)
174 174 windows.build_inno_installer(
175 175 winrm_client, py_version, arch, DIST_PATH, version=version
176 176 )
177 177 windows.build_wix_installer(
178 178 winrm_client, py_version, arch, DIST_PATH, version=version
179 179 )
180 180
181 181
182 182 def terminate_ec2_instances(hga: HGAutomation, aws_region):
183 183 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
184 184 aws.terminate_ec2_instances(c.ec2resource)
185 185
186 186
187 187 def purge_ec2_resources(hga: HGAutomation, aws_region):
188 188 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
189 189 aws.remove_resources(c)
190 190
191 191
192 192 def run_tests_linux(
193 193 hga: HGAutomation,
194 194 aws_region,
195 195 instance_type,
196 196 python_version,
197 197 test_flags,
198 198 distro,
199 199 filesystem,
200 200 ):
201 201 c = hga.aws_connection(aws_region)
202 202 image = aws.ensure_linux_dev_ami(c, distro=distro)
203 203
204 204 t_start = time.time()
205 205
206 206 ensure_extra_volume = filesystem not in ('default', 'tmpfs')
207 207
208 208 with aws.temporary_linux_dev_instances(
209 209 c, image, instance_type, ensure_extra_volume=ensure_extra_volume
210 210 ) as insts:
211 211
212 212 instance = insts[0]
213 213
214 214 linux.prepare_exec_environment(
215 215 instance.ssh_client, filesystem=filesystem
216 216 )
217 217 linux.synchronize_hg(SOURCE_ROOT, instance, '.')
218 218 t_prepared = time.time()
219 219 linux.run_tests(instance.ssh_client, python_version, test_flags)
220 220 t_done = time.time()
221 221
222 222 t_setup = t_prepared - t_start
223 223 t_all = t_done - t_start
224 224
225 225 print(
226 226 'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
227 227 % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
228 228 )
229 229
230 230
231 231 def run_tests_windows(
232 232 hga: HGAutomation,
233 233 aws_region,
234 234 instance_type,
235 235 python_version,
236 236 arch,
237 237 test_flags,
238 238 base_image_name,
239 239 ):
240 240 c = hga.aws_connection(aws_region)
241 241 image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
242 242
243 243 with aws.temporary_windows_dev_instances(
244 244 c, image, instance_type, disable_antivirus=True
245 245 ) as insts:
246 246 instance = insts[0]
247 247
248 248 windows.synchronize_hg(SOURCE_ROOT, '.', instance)
249 249 windows.run_tests(
250 250 instance.winrm_client, python_version, arch, test_flags
251 251 )
252 252
253 253
254 254 def publish_windows_artifacts(
255 255 hg: HGAutomation,
256 256 aws_region,
257 257 version: str,
258 258 pypi: bool,
259 259 mercurial_scm_org: bool,
260 260 ssh_username: str,
261 261 ):
262 262 windows.publish_artifacts(
263 263 DIST_PATH,
264 264 version,
265 265 pypi=pypi,
266 266 mercurial_scm_org=mercurial_scm_org,
267 267 ssh_username=ssh_username,
268 268 )
269 269
270 270
271 271 def run_try(hga: HGAutomation, aws_region: str, rev: str):
272 272 c = hga.aws_connection(aws_region, ensure_ec2_state=False)
273 273 try_server.trigger_try(c, rev=rev)
274 274
275 275
276 276 def get_parser():
277 277 parser = argparse.ArgumentParser()
278 278
279 279 parser.add_argument(
280 280 '--state-path',
281 281 default='~/.hgautomation',
282 282 help='Path for local state files',
283 283 )
284 284 parser.add_argument(
285 285 '--aws-region',
286 286 help='AWS region to use',
287 287 default='us-west-2',
288 288 )
289 289
290 290 subparsers = parser.add_subparsers()
291 291
292 292 sp = subparsers.add_parser(
293 293 'bootstrap-linux-dev',
294 294 help='Bootstrap Linux development environments',
295 295 )
296 296 sp.add_argument(
297 297 '--distros',
298 298 help='Comma delimited list of distros to bootstrap',
299 299 )
300 300 sp.add_argument(
301 301 '--parallel',
302 302 action='store_true',
303 303 help='Generate AMIs in parallel (not CTRL-c safe)',
304 304 )
305 305 sp.set_defaults(func=bootstrap_linux_dev)
306 306
307 307 sp = subparsers.add_parser(
308 308 'bootstrap-windows-dev',
309 309 help='Bootstrap the Windows development environment',
310 310 )
311 311 sp.add_argument(
312 312 '--base-image-name',
313 313 help='AMI name of base image',
314 314 default=aws.WINDOWS_BASE_IMAGE_NAME,
315 315 )
316 316 sp.set_defaults(func=bootstrap_windows_dev)
317 317
318 318 sp = subparsers.add_parser(
319 319 'build-all-windows-packages',
320 320 help='Build all Windows packages',
321 321 )
322 322 sp.add_argument(
323 323 '--revision',
324 324 help='Mercurial revision to build',
325 325 default='.',
326 326 )
327 327 sp.add_argument(
328 328 '--version',
329 329 help='Mercurial version string to use',
330 330 )
331 331 sp.add_argument(
332 332 '--base-image-name',
333 333 help='AMI name of base image',
334 334 default=aws.WINDOWS_BASE_IMAGE_NAME,
335 335 )
336 336 sp.set_defaults(func=build_all_windows_packages)
337 337
338 338 sp = subparsers.add_parser(
339 339 'build-inno',
340 340 help='Build Inno Setup installer(s)',
341 341 )
342 342 sp.add_argument(
343 343 '--python-version',
344 344 help='Which version of Python to target',
345 345 choices={2, 3},
346 346 type=int,
347 347 nargs='*',
348 348 default=[3],
349 349 )
350 350 sp.add_argument(
351 351 '--arch',
352 352 help='Architecture to build for',
353 353 choices={'x86', 'x64'},
354 354 nargs='*',
355 355 default=['x64'],
356 356 )
357 357 sp.add_argument(
358 358 '--revision',
359 359 help='Mercurial revision to build',
360 360 default='.',
361 361 )
362 362 sp.add_argument(
363 363 '--version',
364 364 help='Mercurial version string to use in installer',
365 365 )
366 366 sp.add_argument(
367 367 '--base-image-name',
368 368 help='AMI name of base image',
369 369 default=aws.WINDOWS_BASE_IMAGE_NAME,
370 370 )
371 371 sp.set_defaults(func=build_inno)
372 372
373 373 sp = subparsers.add_parser(
374 374 'build-windows-wheel',
375 375 help='Build Windows wheel(s)',
376 376 )
377 377 sp.add_argument(
378 378 '--python-version',
379 379 help='Python version to build for',
380 choices={'2.7', '3.7', '3.8', '3.9'},
380 choices={'2.7', '3.7', '3.8', '3.9', '3.10'},
381 381 nargs='*',
382 382 default=['3.8'],
383 383 )
384 384 sp.add_argument(
385 385 '--arch',
386 386 help='Architecture to build for',
387 387 choices={'x86', 'x64'},
388 388 nargs='*',
389 389 default=['x64'],
390 390 )
391 391 sp.add_argument(
392 392 '--revision',
393 393 help='Mercurial revision to build',
394 394 default='.',
395 395 )
396 396 sp.add_argument(
397 397 '--base-image-name',
398 398 help='AMI name of base image',
399 399 default=aws.WINDOWS_BASE_IMAGE_NAME,
400 400 )
401 401 sp.set_defaults(func=build_windows_wheel)
402 402
403 403 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
404 404 sp.add_argument(
405 405 '--python-version',
406 406 help='Which version of Python to target',
407 407 choices={2, 3},
408 408 type=int,
409 409 nargs='*',
410 410 default=[3],
411 411 )
412 412 sp.add_argument(
413 413 '--arch',
414 414 help='Architecture to build for',
415 415 choices={'x86', 'x64'},
416 416 nargs='*',
417 417 default=['x64'],
418 418 )
419 419 sp.add_argument(
420 420 '--revision',
421 421 help='Mercurial revision to build',
422 422 default='.',
423 423 )
424 424 sp.add_argument(
425 425 '--version',
426 426 help='Mercurial version string to use in installer',
427 427 )
428 428 sp.add_argument(
429 429 '--base-image-name',
430 430 help='AMI name of base image',
431 431 default=aws.WINDOWS_BASE_IMAGE_NAME,
432 432 )
433 433 sp.set_defaults(func=build_wix)
434 434
435 435 sp = subparsers.add_parser(
436 436 'terminate-ec2-instances',
437 437 help='Terminate all active EC2 instances managed by us',
438 438 )
439 439 sp.set_defaults(func=terminate_ec2_instances)
440 440
441 441 sp = subparsers.add_parser(
442 442 'purge-ec2-resources',
443 443 help='Purge all EC2 resources managed by us',
444 444 )
445 445 sp.set_defaults(func=purge_ec2_resources)
446 446
447 447 sp = subparsers.add_parser(
448 448 'run-tests-linux',
449 449 help='Run tests on Linux',
450 450 )
451 451 sp.add_argument(
452 452 '--distro',
453 453 help='Linux distribution to run tests on',
454 454 choices=linux.DISTROS,
455 455 default='debian10',
456 456 )
457 457 sp.add_argument(
458 458 '--filesystem',
459 459 help='Filesystem type to use',
460 460 choices={'btrfs', 'default', 'ext3', 'ext4', 'jfs', 'tmpfs', 'xfs'},
461 461 default='default',
462 462 )
463 463 sp.add_argument(
464 464 '--instance-type',
465 465 help='EC2 instance type to use',
466 466 default='c5.9xlarge',
467 467 )
468 468 sp.add_argument(
469 469 '--python-version',
470 470 help='Python version to use',
471 471 choices={
472 472 'system2',
473 473 'system3',
474 474 '2.7',
475 475 '3.5',
476 476 '3.6',
477 477 '3.7',
478 478 '3.8',
479 479 'pypy',
480 480 'pypy3.5',
481 481 'pypy3.6',
482 482 },
483 483 default='system2',
484 484 )
485 485 sp.add_argument(
486 486 'test_flags',
487 487 help='Extra command line flags to pass to run-tests.py',
488 488 nargs='*',
489 489 )
490 490 sp.set_defaults(func=run_tests_linux)
491 491
492 492 sp = subparsers.add_parser(
493 493 'run-tests-windows',
494 494 help='Run tests on Windows',
495 495 )
496 496 sp.add_argument(
497 497 '--instance-type',
498 498 help='EC2 instance type to use',
499 499 default='m6i.large',
500 500 )
501 501 sp.add_argument(
502 502 '--python-version',
503 503 help='Python version to use',
504 choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9'},
504 choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
505 505 default='2.7',
506 506 )
507 507 sp.add_argument(
508 508 '--arch',
509 509 help='Architecture to test',
510 510 choices={'x86', 'x64'},
511 511 default='x64',
512 512 )
513 513 sp.add_argument(
514 514 '--test-flags',
515 515 help='Extra command line flags to pass to run-tests.py',
516 516 )
517 517 sp.add_argument(
518 518 '--base-image-name',
519 519 help='AMI name of base image',
520 520 default=aws.WINDOWS_BASE_IMAGE_NAME,
521 521 )
522 522 sp.set_defaults(func=run_tests_windows)
523 523
524 524 sp = subparsers.add_parser(
525 525 'publish-windows-artifacts',
526 526 help='Publish built Windows artifacts (wheels, installers, etc)',
527 527 )
528 528 sp.add_argument(
529 529 '--no-pypi',
530 530 dest='pypi',
531 531 action='store_false',
532 532 default=True,
533 533 help='Skip uploading to PyPI',
534 534 )
535 535 sp.add_argument(
536 536 '--no-mercurial-scm-org',
537 537 dest='mercurial_scm_org',
538 538 action='store_false',
539 539 default=True,
540 540 help='Skip uploading to www.mercurial-scm.org',
541 541 )
542 542 sp.add_argument(
543 543 '--ssh-username',
544 544 help='SSH username for mercurial-scm.org',
545 545 )
546 546 sp.add_argument(
547 547 'version',
548 548 help='Mercurial version string to locate local packages',
549 549 )
550 550 sp.set_defaults(func=publish_windows_artifacts)
551 551
552 552 sp = subparsers.add_parser(
553 553 'try', help='Run CI automation against a custom changeset'
554 554 )
555 555 sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
556 556 sp.set_defaults(func=run_try)
557 557
558 558 return parser
559 559
560 560
561 561 def main():
562 562 parser = get_parser()
563 563 args = parser.parse_args()
564 564
565 565 local_state_path = pathlib.Path(os.path.expanduser(args.state_path))
566 566 automation = HGAutomation(local_state_path)
567 567
568 568 if not hasattr(args, 'func'):
569 569 parser.print_help()
570 570 return
571 571
572 572 kwargs = dict(vars(args))
573 573 del kwargs['func']
574 574 del kwargs['state_path']
575 575
576 576 args.func(automation, **kwargs)
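
cli.py wires each subcommand to its handler with `sp.set_defaults(func=...)`, then `main()` strips the bookkeeping keys (`func`, `state_path`) from `vars(args)` and calls the handler with the rest as keyword arguments. A minimal sketch of the same dispatch pattern, using a hypothetical `greet` handler in place of the real automation commands:

import argparse

def greet(name):
    # The handler receives the remaining parsed options as keyword arguments.
    print('hello, %s' % name)

def get_parser():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    sp = subparsers.add_parser('greet', help='Print a greeting')
    sp.add_argument('--name', default='world')
    sp.set_defaults(func=greet)

    return parser

def main():
    parser = get_parser()
    args = parser.parse_args()

    # No subcommand given: argparse leaves `func` unset, so show help.
    if not hasattr(args, 'func'):
        parser.print_help()
        return

    kwargs = dict(vars(args))
    del kwargs['func']
    args.func(**kwargs)

if __name__ == '__main__':
    main()
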
@@ -1,674 +1,680 @@
1 1 # windows.py - Automation specific to Windows
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # no-check-code because Python 3 native.
9 9
10 10 import datetime
11 11 import os
12 12 import paramiko
13 13 import pathlib
14 14 import re
15 15 import subprocess
16 16 import tempfile
17 17
18 18 from .pypi import upload as pypi_upload
19 19 from .winrm import run_powershell
20 20
21 21
22 22 # PowerShell commands to activate a Visual Studio 2008 environment.
23 23 # This is essentially a port of vcvarsall.bat to PowerShell.
24 24 ACTIVATE_VC9_AMD64 = r'''
25 25 Write-Output "activating Visual Studio 2008 environment for AMD64"
26 26 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
27 27 $Env:VCINSTALLDIR = "${root}\VC\"
28 28 $Env:WindowsSdkDir = "${root}\WinSDK\"
29 29 $Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
30 30 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
31 31 $Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
32 32 $Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
33 33 '''.lstrip()
34 34
35 35 ACTIVATE_VC9_X86 = r'''
36 36 Write-Output "activating Visual Studio 2008 environment for x86"
37 37 $root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
38 38 $Env:VCINSTALLDIR = "${root}\VC\"
39 39 $Env:WindowsSdkDir = "${root}\WinSDK\"
40 40 $Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
41 41 $Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
42 42 $Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
43 43 $Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
44 44 '''.lstrip()
45 45
46 46 HG_PURGE = r'''
47 47 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
48 48 Set-Location C:\hgdev\src
49 49 hg.exe --config extensions.purge= purge --all
50 50 if ($LASTEXITCODE -ne 0) {
51 51 throw "process exited non-0: $LASTEXITCODE"
52 52 }
53 53 Write-Output "purged Mercurial repo"
54 54 '''
55 55
56 56 HG_UPDATE_CLEAN = r'''
57 57 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
58 58 Set-Location C:\hgdev\src
59 59 hg.exe --config extensions.purge= purge --all
60 60 if ($LASTEXITCODE -ne 0) {{
61 61 throw "process exited non-0: $LASTEXITCODE"
62 62 }}
63 63 hg.exe update -C {revision}
64 64 if ($LASTEXITCODE -ne 0) {{
65 65 throw "process exited non-0: $LASTEXITCODE"
66 66 }}
67 67 hg.exe log -r .
68 68 Write-Output "updated Mercurial working directory to {revision}"
69 69 '''.lstrip()
70 70
71 71 BUILD_INNO_PYTHON3 = r'''
72 72 $Env:RUSTUP_HOME = "C:\hgdev\rustup"
73 73 $Env:CARGO_HOME = "C:\hgdev\cargo"
74 74 Set-Location C:\hgdev\src
75 75 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --pyoxidizer-target {pyoxidizer_target} --version {version}
76 76 if ($LASTEXITCODE -ne 0) {{
77 77 throw "process exited non-0: $LASTEXITCODE"
78 78 }}
79 79 '''
80 80
81 81 BUILD_INNO_PYTHON2 = r'''
82 82 Set-Location C:\hgdev\src
83 83 $python = "C:\hgdev\python27-{arch}\python.exe"
84 84 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python {extra_args}
85 85 if ($LASTEXITCODE -ne 0) {{
86 86 throw "process exited non-0: $LASTEXITCODE"
87 87 }}
88 88 '''.lstrip()
89 89
90 90 BUILD_WHEEL = r'''
91 91 Set-Location C:\hgdev\src
92 92 C:\hgdev\python{python_version}-{arch}\python.exe -m pip wheel --wheel-dir dist .
93 93 if ($LASTEXITCODE -ne 0) {{
94 94 throw "process exited non-0: $LASTEXITCODE"
95 95 }}
96 96 '''
97 97
98 98 BUILD_WIX_PYTHON3 = r'''
99 99 $Env:RUSTUP_HOME = "C:\hgdev\rustup"
100 100 $Env:CARGO_HOME = "C:\hgdev\cargo"
101 101 Set-Location C:\hgdev\src
102 102 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --pyoxidizer-target {pyoxidizer_target} --version {version}
103 103 if ($LASTEXITCODE -ne 0) {{
104 104 throw "process exited non-0: $LASTEXITCODE"
105 105 }}
106 106 '''
107 107
108 108 BUILD_WIX_PYTHON2 = r'''
109 109 Set-Location C:\hgdev\src
110 110 $python = "C:\hgdev\python27-{arch}\python.exe"
111 111 C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
112 112 if ($LASTEXITCODE -ne 0) {{
113 113 throw "process exited non-0: $LASTEXITCODE"
114 114 }}
115 115 '''
116 116
117 117 RUN_TESTS = r'''
118 118 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
119 119 if ($LASTEXITCODE -ne 0) {{
120 120 throw "process exited non-0: $LASTEXITCODE"
121 121 }}
122 122 '''
123 123
124 124 WHEEL_FILENAME_PYTHON27_X86 = 'mercurial-{version}-cp27-cp27m-win32.whl'
125 125 WHEEL_FILENAME_PYTHON27_X64 = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
126 126 WHEEL_FILENAME_PYTHON37_X86 = 'mercurial-{version}-cp37-cp37m-win32.whl'
127 127 WHEEL_FILENAME_PYTHON37_X64 = 'mercurial-{version}-cp37-cp37m-win_amd64.whl'
128 128 WHEEL_FILENAME_PYTHON38_X86 = 'mercurial-{version}-cp38-cp38-win32.whl'
129 129 WHEEL_FILENAME_PYTHON38_X64 = 'mercurial-{version}-cp38-cp38-win_amd64.whl'
130 130 WHEEL_FILENAME_PYTHON39_X86 = 'mercurial-{version}-cp39-cp39-win32.whl'
131 131 WHEEL_FILENAME_PYTHON39_X64 = 'mercurial-{version}-cp39-cp39-win_amd64.whl'
132 WHEEL_FILENAME_PYTHON310_X86 = 'mercurial-{version}-cp310-cp310-win32.whl'
133 WHEEL_FILENAME_PYTHON310_X64 = 'mercurial-{version}-cp310-cp310-win_amd64.whl'
132 134
133 135 EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
134 136 EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
135 137 EXE_FILENAME_PYTHON3_X86 = 'Mercurial-{version}-x86.exe'
136 138 EXE_FILENAME_PYTHON3_X64 = 'Mercurial-{version}-x64.exe'
137 139
138 140 MSI_FILENAME_PYTHON2_X86 = 'mercurial-{version}-x86-python2.msi'
139 141 MSI_FILENAME_PYTHON2_X64 = 'mercurial-{version}-x64-python2.msi'
140 142 MSI_FILENAME_PYTHON3_X86 = 'mercurial-{version}-x86.msi'
141 143 MSI_FILENAME_PYTHON3_X64 = 'mercurial-{version}-x64.msi'
142 144
143 145 MERCURIAL_SCM_BASE_URL = 'https://mercurial-scm.org/release/windows'
144 146
145 147 X86_USER_AGENT_PATTERN = '.*Windows.*'
146 148 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
147 149
148 150 EXE_PYTHON2_X86_DESCRIPTION = (
149 151 'Mercurial {version} Inno Setup installer - x86 Windows (Python 2) '
150 152 '- does not require admin rights'
151 153 )
152 154 EXE_PYTHON2_X64_DESCRIPTION = (
153 155 'Mercurial {version} Inno Setup installer - x64 Windows (Python 2) '
154 156 '- does not require admin rights'
155 157 )
156 158 # TODO remove Python version once Python 2 is dropped.
157 159 EXE_PYTHON3_X86_DESCRIPTION = (
158 160 'Mercurial {version} Inno Setup installer - x86 Windows (Python 3) '
159 161 '- does not require admin rights'
160 162 )
161 163 EXE_PYTHON3_X64_DESCRIPTION = (
162 164 'Mercurial {version} Inno Setup installer - x64 Windows (Python 3) '
163 165 '- does not require admin rights'
164 166 )
165 167 MSI_PYTHON2_X86_DESCRIPTION = (
166 168 'Mercurial {version} MSI installer - x86 Windows (Python 2) '
167 169 '- requires admin rights'
168 170 )
169 171 MSI_PYTHON2_X64_DESCRIPTION = (
170 172 'Mercurial {version} MSI installer - x64 Windows (Python 2) '
171 173 '- requires admin rights'
172 174 )
173 175 MSI_PYTHON3_X86_DESCRIPTION = (
174 176 'Mercurial {version} MSI installer - x86 Windows (Python 3) '
175 177 '- requires admin rights'
176 178 )
177 179 MSI_PYTHON3_X64_DESCRIPTION = (
178 180 'Mercurial {version} MSI installer - x64 Windows (Python 3) '
179 181 '- requires admin rights'
180 182 )
181 183
182 184
183 185 def get_vc_prefix(arch):
184 186 if arch == 'x86':
185 187 return ACTIVATE_VC9_X86
186 188 elif arch == 'x64':
187 189 return ACTIVATE_VC9_AMD64
188 190 else:
189 191 raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
190 192
191 193
192 194 def fix_authorized_keys_permissions(winrm_client, path):
193 195 commands = [
194 196 '$ErrorActionPreference = "Stop"',
195 197 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path,
196 198 r'icacls %s /remove:g "NT Service\sshd"' % path,
197 199 ]
198 200
199 201 run_powershell(winrm_client, '\n'.join(commands))
200 202
201 203
202 204 def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance):
203 205 """Synchronize local Mercurial repo to remote EC2 instance."""
204 206
205 207 winrm_client = ec2_instance.winrm_client
206 208
207 209 with tempfile.TemporaryDirectory() as temp_dir:
208 210 temp_dir = pathlib.Path(temp_dir)
209 211
210 212 ssh_dir = temp_dir / '.ssh'
211 213 ssh_dir.mkdir()
212 214 ssh_dir.chmod(0o0700)
213 215
214 216 # Generate SSH key to use for communication.
215 217 subprocess.run(
216 218 [
217 219 'ssh-keygen',
218 220 '-t',
219 221 'rsa',
220 222 '-b',
221 223 '4096',
222 224 '-N',
223 225 '',
224 226 '-f',
225 227 str(ssh_dir / 'id_rsa'),
226 228 ],
227 229 check=True,
228 230 capture_output=True,
229 231 )
230 232
231 233 # Add it to ~/.ssh/authorized_keys on remote.
232 234 # This assumes the file doesn't already exist.
233 235 authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys'
234 236 winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh')
235 237 winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys)
236 238 fix_authorized_keys_permissions(winrm_client, authorized_keys)
237 239
238 240 public_ip = ec2_instance.public_ip_address
239 241
240 242 ssh_config = temp_dir / '.ssh' / 'config'
241 243
242 244 with open(ssh_config, 'w', encoding='utf-8') as fh:
243 245 fh.write('Host %s\n' % public_ip)
244 246 fh.write(' User Administrator\n')
245 247 fh.write(' StrictHostKeyChecking no\n')
246 248 fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
247 249 fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
248 250
249 251 if not (hg_repo / '.hg').is_dir():
250 252 raise Exception(
251 253 '%s is not a Mercurial repository; '
252 254 'synchronization not yet supported' % hg_repo
253 255 )
254 256
255 257 env = dict(os.environ)
256 258 env['HGPLAIN'] = '1'
257 259 env['HGENCODING'] = 'utf-8'
258 260
259 261 hg_bin = hg_repo / 'hg'
260 262
261 263 res = subprocess.run(
262 264 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
263 265 cwd=str(hg_repo),
264 266 env=env,
265 267 check=True,
266 268 capture_output=True,
267 269 )
268 270
269 271 full_revision = res.stdout.decode('ascii')
270 272
271 273 args = [
272 274 'python2.7',
273 275 hg_bin,
274 276 '--config',
275 277 'ui.ssh=ssh -F %s' % ssh_config,
276 278 '--config',
277 279 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
278 280 # Also ensure .hgtags changes are present so auto version
279 281 # calculation works.
280 282 'push',
281 283 '-f',
282 284 '-r',
283 285 full_revision,
284 286 '-r',
285 287 'file(.hgtags)',
286 288 'ssh://%s/c:/hgdev/src' % public_ip,
287 289 ]
288 290
289 291 res = subprocess.run(args, cwd=str(hg_repo), env=env)
290 292
291 293 # Allow 1 (no-op) to not trigger error.
292 294 if res.returncode not in (0, 1):
293 295 res.check_returncode()
294 296
295 297 run_powershell(
296 298 winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
297 299 )
298 300
299 301 # TODO detect dirty local working directory and synchronize accordingly.
300 302
301 303
302 304 def purge_hg(winrm_client):
303 305 """Purge the Mercurial source repository on an EC2 instance."""
304 306 run_powershell(winrm_client, HG_PURGE)
305 307
306 308
307 309 def find_latest_dist(winrm_client, pattern):
308 310 """Find path to newest file in dist/ directory matching a pattern."""
309 311
310 312 res = winrm_client.execute_ps(
311 313 r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" '
312 314 '| Sort-Object LastWriteTime -Descending '
313 315 '| Select-Object -First 1\n'
314 316 '$v.name' % pattern
315 317 )
316 318 return res[0]
317 319
318 320
319 321 def copy_latest_dist(winrm_client, pattern, dest_path):
320 322 """Copy latest file matching pattern in dist/ directory.
321 323
322 324 Given a WinRM client and a file pattern, find the latest file on the remote
323 325 matching that pattern and copy it to the ``dest_path`` directory on the
324 326 local machine.
325 327 """
326 328 latest = find_latest_dist(winrm_client, pattern)
327 329 source = r'C:\hgdev\src\dist\%s' % latest
328 330 dest = dest_path / latest
329 331 print('copying %s to %s' % (source, dest))
330 332 winrm_client.fetch(source, str(dest))
331 333
332 334
333 335 def build_inno_installer(
334 336 winrm_client,
335 337 python_version: int,
336 338 arch: str,
337 339 dest_path: pathlib.Path,
338 340 version=None,
339 341 ):
340 342 """Build the Inno Setup installer on a remote machine.
341 343
342 344 Using a WinRM client, remote commands are executed to build
343 345 a Mercurial Inno Setup installer.
344 346 """
345 347 print(
346 348 'building Inno Setup installer for Python %d %s'
347 349 % (python_version, arch)
348 350 )
349 351
350 352 if python_version == 3:
351 353 # TODO fix this limitation in packaging code
352 354 if not version:
353 355 raise Exception(
354 356 "version string is required when building for Python 3"
355 357 )
356 358
357 359 if arch == "x86":
358 360 target_triple = "i686-pc-windows-msvc"
359 361 elif arch == "x64":
360 362 target_triple = "x86_64-pc-windows-msvc"
361 363 else:
362 364 raise Exception("unhandled arch: %s" % arch)
363 365
364 366 ps = BUILD_INNO_PYTHON3.format(
365 367 pyoxidizer_target=target_triple,
366 368 version=version,
367 369 )
368 370 else:
369 371 extra_args = []
370 372 if version:
371 373 extra_args.extend(['--version', version])
372 374
373 375 ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
374 376 arch=arch, extra_args=' '.join(extra_args)
375 377 )
376 378
377 379 run_powershell(winrm_client, ps)
378 380 copy_latest_dist(winrm_client, '*.exe', dest_path)
379 381
380 382
381 383 def build_wheel(
382 384 winrm_client, python_version: str, arch: str, dest_path: pathlib.Path
383 385 ):
384 386 """Build Python wheels on a remote machine.
385 387
386 388 Using a WinRM client, remote commands are executed to build a Python wheel
387 389 for Mercurial.
388 390 """
389 391 print('Building Windows wheel for Python %s %s' % (python_version, arch))
390 392
391 393 ps = BUILD_WHEEL.format(
392 394 python_version=python_version.replace(".", ""), arch=arch
393 395 )
394 396
395 397 # Python 2.7 requires an activated environment.
396 398 if python_version == "2.7":
397 399 ps = get_vc_prefix(arch) + ps
398 400
399 401 run_powershell(winrm_client, ps)
400 402 copy_latest_dist(winrm_client, '*.whl', dest_path)
401 403
402 404
403 405 def build_wix_installer(
404 406 winrm_client,
405 407 python_version: int,
406 408 arch: str,
407 409 dest_path: pathlib.Path,
408 410 version=None,
409 411 ):
410 412 """Build the WiX installer on a remote machine.
411 413
412 414 Using a WinRM client, remote commands are executed to build a WiX installer.
413 415 """
414 416 print('Building WiX installer for Python %d %s' % (python_version, arch))
415 417
416 418 if python_version == 3:
417 419 # TODO fix this limitation in packaging code
418 420 if not version:
419 421 raise Exception(
420 422 "version string is required when building for Python 3"
421 423 )
422 424
423 425 if arch == "x86":
424 426 target_triple = "i686-pc-windows-msvc"
425 427 elif arch == "x64":
426 428 target_triple = "x86_64-pc-windows-msvc"
427 429 else:
428 430 raise Exception("unhandled arch: %s" % arch)
429 431
430 432 ps = BUILD_WIX_PYTHON3.format(
431 433 pyoxidizer_target=target_triple,
432 434 version=version,
433 435 )
434 436 else:
435 437 extra_args = []
436 438 if version:
437 439 extra_args.extend(['--version', version])
438 440
439 441 ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
440 442 arch=arch, extra_args=' '.join(extra_args)
441 443 )
442 444
443 445 run_powershell(winrm_client, ps)
444 446 copy_latest_dist(winrm_client, '*.msi', dest_path)
445 447
446 448
447 449 def run_tests(winrm_client, python_version, arch, test_flags=''):
448 450 """Run tests on a remote Windows machine.
449 451
450 452 ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``.
451 453 ``arch`` is ``x86`` or ``x64``.
452 454 ``test_flags`` is a str representing extra arguments to pass to
453 455 ``run-tests.py``.
454 456 """
455 457 if not re.match(r'\d\.\d', python_version):
456 458 raise ValueError(
457 459 r'python_version must be \d.\d; got %s' % python_version
458 460 )
459 461
460 462 if arch not in ('x86', 'x64'):
461 463 raise ValueError('arch must be x86 or x64; got %s' % arch)
462 464
463 465 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
464 466
465 467 ps = RUN_TESTS.format(
466 468 python_path=python_path,
467 469 test_flags=test_flags or '',
468 470 )
469 471
470 472 run_powershell(winrm_client, ps)
471 473
472 474
473 475 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
474 476 return (
475 477 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
476 478 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
477 479 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
478 480 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
479 481 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
480 482 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
481 483 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
482 484 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
485 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
486 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
483 487 )
484 488
485 489
486 490 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
487 491 return (
488 492 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
489 493 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
490 494 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
491 495 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
492 496 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
493 497 dist_path / WHEEL_FILENAME_PYTHON38_X64.format(version=version),
494 498 dist_path / WHEEL_FILENAME_PYTHON39_X86.format(version=version),
495 499 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
500 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
501 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
496 502 dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
497 503 dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
498 504 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
499 505 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
500 506 dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
501 507 dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
502 508 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
503 509 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
504 510 )
505 511
506 512
507 513 def generate_latest_dat(version: str):
508 514 python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
509 515 python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
510 516 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
511 517 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
512 518 python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
513 519 python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
514 520 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
515 521 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
516 522
517 523 entries = (
518 524 (
519 525 '10',
520 526 version,
521 527 X86_USER_AGENT_PATTERN,
522 528 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_exe_filename),
523 529 EXE_PYTHON3_X86_DESCRIPTION.format(version=version),
524 530 ),
525 531 (
526 532 '10',
527 533 version,
528 534 X64_USER_AGENT_PATTERN,
529 535 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_exe_filename),
530 536 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
531 537 ),
532 538 (
533 539 '9',
534 540 version,
535 541 X86_USER_AGENT_PATTERN,
536 542 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
537 543 EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
538 544 ),
539 545 (
540 546 '9',
541 547 version,
542 548 X64_USER_AGENT_PATTERN,
543 549 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
544 550 EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
545 551 ),
546 552 (
547 553 '10',
548 554 version,
549 555 X86_USER_AGENT_PATTERN,
550 556 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x86_msi_filename),
551 557 MSI_PYTHON3_X86_DESCRIPTION.format(version=version),
552 558 ),
553 559 (
554 560 '10',
555 561 version,
556 562 X64_USER_AGENT_PATTERN,
557 563 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
558 564 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
559 565 ),
560 566 (
561 567 '9',
562 568 version,
563 569 X86_USER_AGENT_PATTERN,
564 570 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
565 571 MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
566 572 ),
567 573 (
568 574 '9',
569 575 version,
570 576 X64_USER_AGENT_PATTERN,
571 577 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
572 578 MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
573 579 ),
574 580 )
575 581
576 582 lines = ['\t'.join(e) for e in entries]
577 583
578 584 return '\n'.join(lines) + '\n'
579 585
580 586
581 587 def publish_artifacts_pypi(dist_path: pathlib.Path, version: str):
582 588 """Publish Windows release artifacts to PyPI."""
583 589
584 590 wheel_paths = resolve_wheel_artifacts(dist_path, version)
585 591
586 592 for p in wheel_paths:
587 593 if not p.exists():
588 594 raise Exception('%s not found' % p)
589 595
590 596 print('uploading wheels to PyPI (you may be prompted for credentials)')
591 597 pypi_upload(wheel_paths)
592 598
593 599
594 600 def publish_artifacts_mercurial_scm_org(
595 601 dist_path: pathlib.Path, version: str, ssh_username=None
596 602 ):
597 603 """Publish Windows release artifacts to mercurial-scm.org."""
598 604 all_paths = resolve_all_artifacts(dist_path, version)
599 605
600 606 for p in all_paths:
601 607 if not p.exists():
602 608 raise Exception('%s not found' % p)
603 609
604 610 client = paramiko.SSHClient()
605 611 client.load_system_host_keys()
606 612 # We assume the system SSH configuration knows how to connect.
607 613 print('connecting to mercurial-scm.org via ssh...')
608 614 try:
609 615 client.connect('mercurial-scm.org', username=ssh_username)
610 616 except paramiko.AuthenticationException:
611 617 print('error authenticating; is an SSH key available in an SSH agent?')
612 618 raise
613 619
614 620 print('SSH connection established')
615 621
616 622 print('opening SFTP client...')
617 623 sftp = client.open_sftp()
618 624 print('SFTP client obtained')
619 625
620 626 for p in all_paths:
621 627 dest_path = '/var/www/release/windows/%s' % p.name
622 628 print('uploading %s to %s' % (p, dest_path))
623 629
624 630 with p.open('rb') as fh:
625 631 data = fh.read()
626 632
627 633 with sftp.open(dest_path, 'wb') as fh:
628 634 fh.write(data)
629 635 fh.chmod(0o0664)
630 636
631 637 latest_dat_path = '/var/www/release/windows/latest.dat'
632 638
633 639 now = datetime.datetime.utcnow()
634 640 backup_path = dist_path / (
635 641 'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
636 642 )
637 643 print('backing up %s to %s' % (latest_dat_path, backup_path))
638 644
639 645 with sftp.open(latest_dat_path, 'rb') as fh:
640 646 latest_dat_old = fh.read()
641 647
642 648 with backup_path.open('wb') as fh:
643 649 fh.write(latest_dat_old)
644 650
645 651 print('writing %s with content:' % latest_dat_path)
646 652 latest_dat_content = generate_latest_dat(version)
647 653 print(latest_dat_content)
648 654
649 655 with sftp.open(latest_dat_path, 'wb') as fh:
650 656 fh.write(latest_dat_content.encode('ascii'))
651 657
652 658
653 659 def publish_artifacts(
654 660 dist_path: pathlib.Path,
655 661 version: str,
656 662 pypi=True,
657 663 mercurial_scm_org=True,
658 664 ssh_username=None,
659 665 ):
660 666 """Publish Windows release artifacts.
661 667
662 668 Files are found in `dist_path`. We will look for files with version string
663 669 `version`.
664 670
665 671 `pypi` controls whether we upload to PyPI.
666 672 `mercurial_scm_org` controls whether we upload to mercurial-scm.org.
667 673 """
668 674 if pypi:
669 675 publish_artifacts_pypi(dist_path, version)
670 676
671 677 if mercurial_scm_org:
672 678 publish_artifacts_mercurial_scm_org(
673 679 dist_path, version, ssh_username=ssh_username
674 680 )
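
The WHEEL_FILENAME_* constants in windows.py spell out each CPython wheel tag by hand; the ABI tag carries an `m` (pymalloc) suffix only up to CPython 3.7, which is why the new entries read `cp310-cp310` rather than `cp310-cp310m`. A sketch that derives the same names, assuming only the win32/win_amd64 platforms used above:

def wheel_filename(version, py_version, arch):
    # CPython interpreter tag, e.g. '3.10' -> 'cp310'.
    tag = 'cp%s' % py_version.replace('.', '')
    # CPython 3.8 dropped the pymalloc 'm' flag from the ABI tag.
    abi = tag + ('m' if py_version in ('2.7', '3.7') else '')
    platform = {'x86': 'win32', 'x64': 'win_amd64'}[arch]
    return 'mercurial-%s-%s-%s-%s.whl' % (version, tag, abi, platform)

assert wheel_filename('6.1', '3.10', 'x64') == 'mercurial-6.1-cp310-cp310-win_amd64.whl'
assert wheel_filename('6.1', '2.7', 'x86') == 'mercurial-6.1-cp27-cp27m-win32.whl'
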
@@ -1,188 +1,193 @@
1 1 stages:
2 2 - tests
3 3 - phabricator
4 4
5 5 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
6 6
7 7 variables:
8 8 PYTHON: python
9 9 TEST_HGMODULEPOLICY: "allow"
10 10 HG_CI_IMAGE_TAG: "latest"
11 11 TEST_HGTESTS_ALLOW_NETIO: "0"
12 12
13 13 .all_template: &all
14 14 when: always
15 15
16 16 .runtests_template: &runtests
17 17 <<: *all
18 18 stage: tests
19 19 # The runner made a clone as root.
20 20 # We make a new clone owned by the user that runs the step.
21 21 before_script:
22 22 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
23 23 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
24 24 - cd /tmp/mercurial-ci/
25 25 - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
26 26 - black --version
27 27 - clang-format --version
28 28 script:
29 29 - echo "python used, $PYTHON"
30 30 - echo "$RUNTEST_ARGS"
31 31 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
32 32
33 33 checks-py2:
34 34 <<: *runtests
35 35 variables:
36 36 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
37 37
38 38 checks-py3:
39 39 <<: *runtests
40 40 variables:
41 41 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
42 42 PYTHON: python3
43 43
44 44 rust-cargo-test-py2: &rust_cargo_test
45 45 <<: *all
46 46 stage: tests
47 47 script:
48 48 - echo "python used, $PYTHON"
49 49 - make rust-tests
50 50
51 51 rust-cargo-test-py3:
52 52 stage: tests
53 53 <<: *rust_cargo_test
54 54 variables:
55 55 PYTHON: python3
56 56
57 57 phabricator-refresh:
58 58 stage: phabricator
59 rules:
60 - if: '"$PHABRICATOR_TOKEN" != "NO-PHAB"'
61 when: on_success
62 - if: '"$PHABRICATOR_TOKEN" == "NO-PHAB"'
63 when: never
59 64 variables:
60 65 DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
61 66 STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
62 67 script:
63 68 - |
64 69 if [ `hg branch` == "stable" ]; then
65 70 ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
66 71 else
67 72 ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
68 73 fi
69 74
70 75 test-py2:
71 76 <<: *runtests
72 77 variables:
73 78 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
74 79 TEST_HGMODULEPOLICY: "c"
75 80 TEST_HGTESTS_ALLOW_NETIO: "1"
76 81
77 82 test-py3:
78 83 <<: *runtests
79 84 variables:
80 85 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
81 86 PYTHON: python3
82 87 TEST_HGMODULEPOLICY: "c"
83 88 TEST_HGTESTS_ALLOW_NETIO: "1"
84 89
85 90 test-py2-pure:
86 91 <<: *runtests
87 92 variables:
88 93 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
89 94 TEST_HGMODULEPOLICY: "py"
90 95
91 96 test-py3-pure:
92 97 <<: *runtests
93 98 variables:
94 99 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
95 100 PYTHON: python3
96 101 TEST_HGMODULEPOLICY: "py"
97 102
98 103 test-py2-rust:
99 104 <<: *runtests
100 105 variables:
101 106 HGWITHRUSTEXT: cpython
102 107 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
103 108 TEST_HGMODULEPOLICY: "rust+c"
104 109
105 110 test-py3-rust:
106 111 <<: *runtests
107 112 variables:
108 113 HGWITHRUSTEXT: cpython
109 114 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
110 115 PYTHON: python3
111 116 TEST_HGMODULEPOLICY: "rust+c"
112 117
113 118 test-py3-rhg:
114 119 <<: *runtests
115 120 variables:
116 121 HGWITHRUSTEXT: cpython
117 122 RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
118 123 PYTHON: python3
119 124 TEST_HGMODULEPOLICY: "rust+c"
120 125
121 126 test-py2-chg:
122 127 <<: *runtests
123 128 variables:
124 129 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
125 130 TEST_HGMODULEPOLICY: "c"
126 131
127 132 test-py3-chg:
128 133 <<: *runtests
129 134 variables:
130 135 PYTHON: python3
131 136 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
132 137 TEST_HGMODULEPOLICY: "c"
133 138
134 139 check-pytype-py3:
135 140 extends: .runtests_template
136 141 before_script:
137 142 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
138 143 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
139 144 - cd /tmp/mercurial-ci/
140 145 - make local PYTHON=$PYTHON
141 146 - $PYTHON -m pip install --user -U pytype==2021.04.15
142 147 variables:
143 148 RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t"
144 149 HGTEST_SLOWTIMEOUT: "3600"
145 150 PYTHON: python3
146 151 TEST_HGMODULEPOLICY: "c"
147 152
148 153 # `sh.exe --login` sets a couple of extra environment variables that are defined
149 154 # in the MinGW shell, but switches CWD to /home/$username. The previous value
150 155 # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running
151 156 # run-tests.py: it is needed to make run-tests.py generate a `python3` script
152 157 # that satisfies the various shebang lines and delegates to `py -3`.
153 158 .window_runtests_template: &windows_runtests
154 159 <<: *all
155 160 when: manual # we don't have any Windows runners at the moment
156 161 stage: tests
157 162 before_script:
158 163 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && ls -1 tests/test-check-*.* > C:/Temp/check-tests.txt'
159 164 # TODO: find/install cvs, bzr, perforce, gpg, sqlite3
160 165
161 166 script:
162 167 - echo "Entering script section"
163 168 - echo "python used, $Env:PYTHON"
164 169 - Invoke-Expression "$Env:PYTHON -V"
165 170 - Invoke-Expression "$Env:PYTHON -m black --version"
166 171 - echo "$Env:RUNTEST_ARGS"
167 172 - echo "$Env:TMP"
168 173 - echo "$Env:TEMP"
169 174
170 175 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
171 176
172 177 windows-py3:
173 178 <<: *windows_runtests
174 179 tags:
175 180 - windows
176 181 variables:
177 182 TEST_HGMODULEPOLICY: "c"
178 183 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
179 184 PYTHON: py -3
180 185
181 186 windows-py3-pyox:
182 187 <<: *windows_runtests
183 188 tags:
184 189 - windows
185 190 variables:
186 191 TEST_HGMODULEPOLICY: "c"
187 192 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt --pyoxidized"
188 193 PYTHON: py -3
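
The new `rules:` block on phabricator-refresh skips the job entirely when `$PHABRICATOR_TOKEN` is the sentinel value `NO-PHAB`; GitLab evaluates the rules top to bottom and applies the first match. A toy evaluation of that gating logic in Python (the `when_for` helper is illustrative, not a GitLab API):

def when_for(phabricator_token):
    # First matching rule wins, mirroring GitLab CI `rules:` semantics.
    rules = [
        (lambda tok: tok != 'NO-PHAB', 'on_success'),
        (lambda tok: tok == 'NO-PHAB', 'never'),
    ]
    for matches, when in rules:
        if matches(phabricator_token):
            return when
    return 'never'  # no rule matched: the job is not added

assert when_for('api-token-123') == 'on_success'
assert when_for('NO-PHAB') == 'never'
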
@@ -1,189 +1,198 @@
1 1 # install-dependencies.ps1 - Install Windows dependencies for building Mercurial
2 2 #
3 3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # This script can be used to bootstrap a Mercurial build environment on
9 9 # Windows.
10 10 #
11 11 # The script makes a lot of assumptions about how things should work.
12 12 # For example, the install location of Python is hardcoded to c:\hgdev\*.
13 13 #
14 14 # The script should be executed from a PowerShell with elevated privileges
15 15 # if you don't want to see a UAC prompt for various installers.
16 16 #
17 17 # The script is tested on Windows 10 and Windows Server 2019 (in EC2).
18 18
19 19 $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe"
20 20 $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139"
21 21
22 22 $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe"
23 23 $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220"
24 24 $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe"
25 25 $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987"
26 26
27 27 $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10.exe"
28 28 $PYTHON38_x86_SHA256 = "ad07633a1f0cd795f3bf9da33729f662281df196b4567fa795829f3bb38a30ac"
29 29 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
30 30 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
31 31
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5.exe"
33 $PYTHON39_x86_SHA256 = "505129081a839b699a6ab9064b441ad922ef03767b5dd4241fd0c2166baf64de"
34 $PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5-amd64.exe"
35 $PYTHON39_x64_SHA256 = "84d5243088ba00c11e51905c704dbe041040dfff044f4e1ce5476844ee2e6eac"
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9.exe"
33 $PYTHON39_x86_SHA256 = "6646a5683adf14d35e8c53aab946895bc0f0b825f7acac3a62cc85ee7d0dc71a"
34 $PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9-amd64.exe"
35 $PYTHON39_x64_SHA256 = "137d59e5c0b01a8f1bdcba08344402ae658c81c6bf03b6602bd8b4e951ad0714"
36
37 $PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0.exe"
38 $PYTHON310_x86_SHA256 = "ea896eeefb1db9e12fb89ec77a6e28c9fe52b4a162a34c85d9688be2ec2392e8"
39 $PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe"
40 $PYTHON310_x64_SHA256 = "cb580eb7dc55f9198e650f016645023e8b2224cf7d033857d12880b46c5c94ef"
36 41
37 42 # PIP 19.2.3.
38 43 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
39 44 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
40 45
41 46 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
42 47 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
43 48
44 49 $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip"
45 50 $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF"
46 51
47 52 $MERCURIAL_WHEEL_FILENAME = "mercurial-5.8.1-cp39-cp39-win_amd64.whl"
48 53 $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/5c/b5/a5fa664761eef29b6c90eb24cb09ab8fe2c9b4b86af41d42c17476aff29b/$MERCURIAL_WHEEL_FILENAME"
49 54 $MERCURIAL_WHEEL_SHA256 = "cbf3efa68fd7ebf94691bd00d2c86bbd47ca73620c8faa4f18b6c394bf5f82b0"
50 55
51 56 $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe"
52 57 $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72"
53 58
54 59 $PYOXIDIZER_URL = "https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.17/PyOxidizer-0.17.0-x64.msi"
55 60 $PYOXIDIZER_SHA256 = "85c3bc21a18eb5e2db4dad87cca29accf725c7d59dd364a853ab5099c272024b"
56 61
57 62 # Writing progress slows down downloads substantially. So disable it.
58 63 $progressPreference = 'silentlyContinue'
59 64
60 65 function Secure-Download($url, $path, $sha256) {
61 66 if (Test-Path -Path $path) {
62 67 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
63 68
64 69 if ($hash.Hash -eq $sha256) {
65 70 Write-Output "SHA256 of $path verified as $sha256"
66 71 return
67 72 }
68 73
69 74 Write-Output "hash mismatch on $path; downloading again"
70 75 }
71 76
72 77 Write-Output "downloading $url to $path"
73 78 Invoke-WebRequest -Uri $url -OutFile $path
74 79 Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash
75 80
76 81 if ($hash.Hash -ne $sha256) {
77 82 Remove-Item -Path $path
78 83 throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256"
79 84 }
80 85 }
81 86
82 87 function Invoke-Process($path, $arguments) {
83 88 echo "$path $arguments"
84 89
85 90 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
86 91
87 92 if ($p.ExitCode -ne 0) {
88 93 throw "process exited non-0: $($p.ExitCode)"
89 94 }
90 95 }
91 96
92 97 function Install-Python3($name, $installer, $dest, $pip) {
93 98 Write-Output "installing $name"
94 99
95 100 # We hit this when running the script as part of Simple Systems Manager in
96 101 # EC2. The Python 3 installer doesn't seem to like per-user installs
97 102 # when running as the SYSTEM user. So enable global installs if executed in
98 103 # this mode.
99 104 if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") {
100 105 Write-Output "running with SYSTEM account; installing for all users"
101 106 $allusers = "1"
102 107 }
103 108 else {
104 109 $allusers = "0"
105 110 }
106 111
107 112 Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0"
108 113 Invoke-Process ${dest}\python.exe $pip
109 114 }
110 115
111 116 function Install-Rust($prefix) {
112 117 Write-Output "installing Rust"
113 118 $Env:RUSTUP_HOME = "${prefix}\rustup"
114 119 $Env:CARGO_HOME = "${prefix}\cargo"
115 120
116 121 Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc"
117 122 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc"
118 123 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.52.0"
119 124 Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy"
120 125 }
121 126
122 127 function Install-Dependencies($prefix) {
123 128 if (!(Test-Path -Path $prefix\assets)) {
124 129 New-Item -Path $prefix\assets -ItemType Directory
125 130 }
126 131
127 132 $pip = "${prefix}\assets\get-pip.py"
128 133
129 134 Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256
130 135 Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256
131 136 Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256
132 137 Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256
133 138 Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256
134 139 Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256
140 Secure-Download $PYTHON310_x86_URL ${prefix}\assets\python310-x86.exe $PYTHON310_x86_SHA256
141 Secure-Download $PYTHON310_x64_URL ${prefix}\assets\python310-x64.exe $PYTHON310_x64_SHA256
135 142 Secure-Download $PIP_URL ${pip} $PIP_SHA256
136 143 Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256
137 144 Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256
138 145 Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256
139 146 Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256
140 147 Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256
141 148 Secure-Download $PYOXIDIZER_URL ${prefix}\assets\PyOxidizer.msi $PYOXIDIZER_SHA256
142 149
143 150 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
144 151 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
145 152 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
146 153 # Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
147 154 Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
148 155 Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
156 Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
157 Install-Python3 "Python 3.10 64-bit" ${prefix}\assets\python310-x64.exe ${prefix}\python310-x64 ${pip}
149 158
150 159 Write-Output "installing Visual Studio 2017 Build Tools and SDKs"
151 160 Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140"
152 161
153 162 Write-Output "installing PyOxidizer"
154 163 Invoke-Process msiexec.exe "/i ${prefix}\assets\PyOxidizer.msi /l* ${prefix}\assets\PyOxidizer.log /quiet"
155 164
156 165 Install-Rust ${prefix}
157 166
158 167 Write-Output "installing Inno Setup"
159 168 Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES"
160 169
161 170 Write-Output "extracting MinGW base archive"
162 171 Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force
163 172
164 173 Write-Output "updating MinGW package catalogs"
165 174 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update"
166 175
167 176 Write-Output "installing MinGW packages"
168 177 Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip"
169 178
170 179 # Construct a virtualenv useful for bootstrapping. It conveniently contains a
171 180 # Mercurial install.
172 181 Write-Output "creating bootstrap virtualenv with Mercurial"
173 182 Invoke-Process "$prefix\python39-x64\python.exe" "-m venv ${prefix}\venv-bootstrap"
174 183 Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}"
175 184 }
176 185
177 186 function Clone-Mercurial-Repo($prefix, $repo_url, $dest) {
178 187 Write-Output "cloning $repo_url to $dest"
179 188 # TODO Figure out why CA verification isn't working in EC2 and remove
180 189 # --insecure.
181 190 Invoke-Process "${prefix}\venv-bootstrap\Scripts\python.exe" "${prefix}\venv-bootstrap\Scripts\hg clone --insecure $repo_url $dest"
182 191
183 192 # Mark repo as non-publishing by default for convenience.
184 193 Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false"
185 194 }
186 195
187 196 $prefix = "c:\hgdev"
188 197 Install-Dependencies $prefix
189 198 Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src
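
The bootstrap script above hinges on one idiom: every external asset goes through Secure-Download, which reuses a cached file only when its SHA-256 matches, re-downloads otherwise, and deletes the file and aborts on a post-download mismatch. Below is a minimal Python sketch of the same idiom for readers adapting it outside PowerShell; the function name and error handling are illustrative assumptions, not code from this repository.

    # Sketch of the Secure-Download pattern: verify cache, else fetch and verify.
    import hashlib
    import os
    import urllib.request

    def secure_download(url, path, sha256):
        if os.path.exists(path):
            with open(path, 'rb') as f:
                if hashlib.sha256(f.read()).hexdigest() == sha256.lower():
                    print('SHA256 of %s verified as %s' % (path, sha256))
                    return
            print('hash mismatch on %s; downloading again' % path)
        print('downloading %s to %s' % (url, path))
        urllib.request.urlretrieve(url, path)
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != sha256.lower():
            # Remove the corrupt download so a retry starts clean.
            os.remove(path)
            raise RuntimeError('hash mismatch when downloading %s; got %s, expected %s'
                               % (url, digest, sha256))
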
@@ -1,301 +1,328 b''
1 1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile with python 3.7
3 3 # To update, run:
4 4 #
5 5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py3.txt contrib/packaging/requirements-windows.txt.in
6 6 #
7 7 atomicwrites==1.4.0 \
8 8 --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \
9 --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a \
9 --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a
10 10 # via pytest
11 11 attrs==21.2.0 \
12 12 --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
13 --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb \
13 --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb
14 14 # via pytest
15 15 cached-property==1.5.2 \
16 16 --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
17 --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 \
17 --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0
18 18 # via pygit2
19 19 certifi==2021.5.30 \
20 20 --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \
21 --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 \
21 --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8
22 22 # via dulwich
23 cffi==1.14.4 \
24 --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \
25 --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \
26 --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \
27 --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \
28 --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \
29 --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \
30 --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \
31 --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \
32 --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \
33 --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \
34 --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \
35 --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \
36 --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \
37 --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \
38 --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \
39 --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \
40 --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \
41 --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \
42 --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \
43 --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \
44 --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \
45 --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \
46 --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \
47 --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \
48 --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \
49 --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \
50 --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \
51 --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \
52 --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \
53 --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \
54 --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \
55 --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \
56 --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \
57 --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \
23 cffi==1.15.0 \
24 --hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \
25 --hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \
26 --hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \
27 --hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \
28 --hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \
29 --hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \
30 --hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \
31 --hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \
32 --hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \
33 --hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \
34 --hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \
35 --hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \
36 --hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \
37 --hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \
38 --hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \
39 --hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \
40 --hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \
41 --hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \
42 --hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \
43 --hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \
44 --hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \
45 --hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \
46 --hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \
47 --hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \
48 --hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \
49 --hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \
50 --hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \
51 --hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \
52 --hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \
53 --hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \
54 --hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \
55 --hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \
56 --hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \
57 --hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \
58 --hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \
59 --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \
60 --hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \
61 --hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \
62 --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \
63 --hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \
64 --hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \
65 --hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \
66 --hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \
67 --hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \
68 --hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \
69 --hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \
70 --hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \
71 --hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \
72 --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \
73 --hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796
58 74 # via pygit2
59 75 colorama==0.4.4 \
60 76 --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \
61 --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 \
77 --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2
62 78 # via pytest
63 79 docutils==0.16 \
64 80 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
65 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
81 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
66 82 # via -r contrib/packaging/requirements-windows.txt.in
67 83 dulwich==0.20.6 ; python_version >= "3" \
68 84 --hash=sha256:1ccd55e38fa9f169290f93e027ab4508202f5bdd6ef534facac4edd3f6903f0d \
69 85 --hash=sha256:2452a0379cc7bbbd7ab893ec104d18039f1ea98b0d6be6bca5646e5cf29e0ae9 \
70 86 --hash=sha256:2f4aebc54ed2d37dcee737024421452375570a422eb682232e676aa7ebc9cb4b \
71 87 --hash=sha256:304f52b10c49c3a6ddfbd73e2e93d8e979350225cfba9688e51110e74fa2f718 \
72 88 --hash=sha256:49e747c72d9099e873bf6196260346d5996c3f28af788294d47a8accdc524de7 \
73 89 --hash=sha256:4fee359928c59b53af153a582a7ed7595259a5a825df400301a29e17fd78dfd3 \
74 90 --hash=sha256:50ef300a9fa4efd9f85009c2bd8b515266ec1529400f8834f85c04fa9f09b2c0 \
75 91 --hash=sha256:5348310f21b2a23847342ce464461499b6652483fa42de03714d0f6421a99698 \
76 92 --hash=sha256:7e7b5dea5178b6493fdb83adccbe81de9ddff55f79880185ed594c0e3a97209b \
77 93 --hash=sha256:8f7a7f973be2beedfb10dd8d3eb6bdf9ec466c72ad555704897cbd6357fe5021 \
78 94 --hash=sha256:bea6e6caffc6c73bfd1647714c5715ab96ac49deb8beb8b67511529afa25685a \
79 95 --hash=sha256:e5871b86a079e9e290f52ab14559cea1b694a0b8ed2b9ebb898f6ced7f14a406 \
80 --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b \
96 --hash=sha256:e593f514b8ac740b4ceeb047745b4719bfc9f334904245c6edcb3a9d002f577b
81 97 # via -r contrib/packaging/requirements-windows.txt.in
82 98 fuzzywuzzy==0.18.0 \
83 --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \
99 --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8
84 100 # via -r contrib/packaging/requirements-windows.txt.in
85 101 idna==3.2 \
86 102 --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \
87 --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 \
103 --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3
88 104 # via yarl
89 105 importlib-metadata==3.1.0 \
90 106 --hash=sha256:590690d61efdd716ff82c39ca9a9d4209252adfe288a4b5721181050acbd4175 \
91 --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099 \
92 # via keyring, pluggy, pytest
107 --hash=sha256:d9b8a46a0885337627a6430db287176970fff18ad421becec1d64cfc763c2099
108 # via
109 # keyring
110 # pluggy
111 # pytest
93 112 iniconfig==1.1.1 \
94 113 --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
95 --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 \
114 --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32
96 115 # via pytest
97 116 keyring==21.4.0 \
98 117 --hash=sha256:4e34ea2fdec90c1c43d6610b5a5fafa1b9097db1802948e90caf5763974b8f8d \
99 --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466 \
118 --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466
100 119 # via -r contrib/packaging/requirements-windows.txt.in
101 120 multidict==5.1.0 \
102 121 --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \
103 122 --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \
104 123 --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \
105 124 --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \
106 125 --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \
107 126 --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \
108 127 --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \
109 128 --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \
110 129 --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \
111 130 --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \
112 131 --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \
113 132 --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \
114 133 --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \
115 134 --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \
116 135 --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \
117 136 --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \
118 137 --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \
119 138 --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \
120 139 --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \
121 140 --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \
122 141 --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \
123 142 --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \
124 143 --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \
125 144 --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \
126 145 --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \
127 146 --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \
128 147 --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \
129 148 --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \
130 149 --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \
131 150 --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \
132 151 --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \
133 152 --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \
134 153 --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \
135 154 --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \
136 155 --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \
137 156 --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \
138 --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 \
157 --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80
139 158 # via yarl
140 159 packaging==21.0 \
141 160 --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \
142 --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 \
161 --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14
143 162 # via pytest
144 163 pluggy==0.13.1 \
145 164 --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \
146 --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \
165 --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d
147 166 # via pytest
148 167 py==1.10.0 \
149 168 --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \
150 --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a \
169 --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a
151 170 # via pytest
152 pycparser==2.20 \
153 --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
154 --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \
171 pycparser==2.21 \
172 --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
173 --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
155 174 # via cffi
156 pygit2==1.4.0 ; python_version >= "3" \
157 --hash=sha256:0d298098e286eeda000e49ca7e1b41f87300e10dd8b9d06b32b008bd61f50b83 \
158 --hash=sha256:0ee135eb2cd8b07ce1374f3596cc5c3213472d6389bad6a4c5d87d8e267e93e9 \
159 --hash=sha256:32eb863d6651d4890ced318505ea8dc229bd9637deaf29c898de1ab574d727a0 \
160 --hash=sha256:37d6d7d6d7804c42a0fe23425c72e38093488525092fc5e51a05684e63503ce7 \
161 --hash=sha256:41204b6f3406d9f53147710f3cc485d77181ba67f57c34d36b7c86de1c14a18c \
162 --hash=sha256:818c91b582109d90580c5da74af783738838353f15eb12eeb734d80a974b05a3 \
163 --hash=sha256:8306a302487dac67df7af6a064bb37e8a8eb4138958f9560ff49ff162e185dab \
164 --hash=sha256:9c2f2d9ef59513007b66f6534b000792b614de3faf60313a0a68f6b8571aea85 \
165 --hash=sha256:9c8d5881eb709e2e2e13000b507a131bd5fb91a879581030088d0ddffbcd19af \
166 --hash=sha256:b422e417739def0a136a6355723dfe8a5ffc83db5098076f28a14f1d139779c1 \
167 --hash=sha256:cbeb38ab1df9b5d8896548a11e63aae8a064763ab5f1eabe4475e6b8a78ee1c8 \
168 --hash=sha256:cf00481ddf053e549a6edd0216bdc267b292d261eae02a67bb3737de920cbf88 \
169 --hash=sha256:d0d889144e9487d926fecea947c3f39ce5f477e521d7d467d2e66907e4cd657d \
170 --hash=sha256:ddb7a1f6d38063e8724abfa1cfdfb0f9b25014b8bca0546274b7a84b873a3888 \
171 --hash=sha256:e9037a7d810750fe23c9f5641ef14a0af2525ff03e14752cd4f73e1870ecfcb0 \
172 --hash=sha256:ec5c0365a9bdfcac1609d20868507b28685ec5ea7cc3a2c903c9b62ef2e0bbc0 \
173 --hash=sha256:fdd8ba30cda277290e000322f505132f590cf89bd7d31829b45a3cb57447ec32 \
175 pygit2==1.7.1 ; python_version >= "3" \
176 --hash=sha256:2c9e95efb86c0b32cc07c26be3d179e851ca4a7899c47fef63c4203963144f5e \
177 --hash=sha256:3ddacbf461652d3d4900382f821d9fbd5ae2dedecd7862b5245842419ad0ccba \
178 --hash=sha256:4cb0414df6089d0072ebe93ff2f34730737172dd5f0e72289567d06a6caf09c0 \
179 --hash=sha256:56e960dc74f4582bfa3ca17a1a9d542732fc93b5cf8f82574c235d06b2d61eae \
180 --hash=sha256:6b17ab922c2a2d99b30ab9222472b07732bf7261d9f9655a4ea23b4c700049d8 \
181 --hash=sha256:73a7b471f22cb59e8729016de1f447c472b3b2c1cc2b622194e5e3b48a7f5776 \
182 --hash=sha256:761a8850e33822796c1c24d411d5cc2460c04e1a74b04ae8560efd3596bbd6bd \
183 --hash=sha256:7c467e81158f5827b3bca6362e5cc9b92857eff9de65034d338c1f18524b09be \
184 --hash=sha256:7c56e10592e62610a19bd3e2a633aafe3488c57b906c7c2fde0299937f0f0b2f \
185 --hash=sha256:7cc2a8e29cc9598310a78cf58b70d9331277cf374802be8f97d97c4a9e5d8387 \
186 --hash=sha256:812670f7994f31778e873a9eced29d2bbfa91674e8be0ab1e974c8a4bda9cbab \
187 --hash=sha256:8cdb0b1d6c3d24b44f340fed143b16e64ba23fe2a449f1a5db87aaf9339a9dbe \
188 --hash=sha256:91b77a305d8d18b649396e66e832d654cd593a3d29b5728f753f254a04533812 \
189 --hash=sha256:a75bcde32238c77eb0cf7d9698a5aa899408d7ad999a5920a29a7c4b80fdeaa7 \
190 --hash=sha256:b060240cf3038e7a0706bbfc5436dd03b8d5ac797ac1d512b613f4d04b974c80 \
191 --hash=sha256:cdfa61c0428a8182e5a6a1161c017b824cd511574f080a40b10d6413774eb0ca \
192 --hash=sha256:d7faa29558436decc2e78110f38d6677eb366b683ba5cdc2803d47195711165d \
193 --hash=sha256:d831825ad9c3b3c28e6b3ef8a2401ad2d3fd4db5455427ff27175a7e254e2592 \
194 --hash=sha256:df4c477bdfac85d32a1e3180282cd829a0980aa69be9bd0f7cbd4db1778ca72b \
195 --hash=sha256:eced3529bafcaaac015d08dfaa743b3cbad37fcd5b13ae9d280b8b7f716ec5ce \
196 --hash=sha256:fec17e2da668e6bb192d777417aad9c7ca924a166d0a0b9a81a11e00362b1bc7
174 197 # via -r contrib/packaging/requirements-windows.txt.in
175 198 pygments==2.7.1 \
176 199 --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
177 --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \
200 --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7
178 201 # via -r contrib/packaging/requirements-windows.txt.in
179 202 pyparsing==2.4.7 \
180 203 --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
181 --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b \
204 --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
182 205 # via packaging
183 pytest-vcr==1.0.2 \
184 --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896 \
185 # via -r contrib/packaging/requirements-windows.txt.in
186 206 pytest==6.2.4 \
187 207 --hash=sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b \
188 --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890 \
208 --hash=sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890
189 209 # via pytest-vcr
210 pytest-vcr==1.0.2 \
211 --hash=sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896
212 # via -r contrib/packaging/requirements-windows.txt.in
190 213 pywin32-ctypes==0.2.0 \
191 214 --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
192 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \
193 # via -r contrib/packaging/requirements-windows.txt.in, keyring
215 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
216 # via
217 # -r contrib/packaging/requirements-windows.txt.in
218 # keyring
194 219 pyyaml==5.4.1 \
195 220 --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
196 221 --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
197 222 --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
198 223 --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
199 224 --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
200 225 --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
201 226 --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
202 227 --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
203 228 --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
204 229 --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
205 230 --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \
206 231 --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
207 232 --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
208 233 --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
209 234 --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
210 235 --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
211 236 --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
212 237 --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
213 238 --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
214 239 --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
215 240 --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
216 241 --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
217 242 --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
218 243 --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
219 244 --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
220 245 --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
221 246 --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
222 247 --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
223 --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 \
248 --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
224 249 # via vcrpy
225 250 six==1.16.0 \
226 251 --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
227 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 \
252 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
228 253 # via vcrpy
229 254 toml==0.10.2 \
230 255 --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
231 --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f \
256 --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
232 257 # via pytest
233 258 typing-extensions==3.10.0.0 \
234 259 --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \
235 260 --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \
236 --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 \
261 --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84
237 262 # via yarl
238 263 urllib3==1.25.11 \
239 264 --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
240 --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
265 --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e
241 266 # via dulwich
242 267 vcrpy==4.1.1 \
243 268 --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \
244 --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 \
269 --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599
245 270 # via pytest-vcr
246 windows-curses==2.2.0 \
247 --hash=sha256:1452d771ec6f9b3fef037da2b169196a9a12be4e86a6c27dd579adac70c42028 \
248 --hash=sha256:267544e4f60c09af6505e50a69d7f01d7f8a281cf4bd4fc7efc3b32b9a4ef64e \
249 --hash=sha256:389228a3df556102e72450f599283094168aa82eee189f501ad9f131a0fc92e1 \
250 --hash=sha256:84336fe470fa07288daec5c684dec74c0766fec6b3511ccedb4c494804acfbb7 \
251 --hash=sha256:9aa6ff60be76f5de696dc6dbf7897e3b1e6abcf4c0f741e9a0ee22cd6ef382f8 \
252 --hash=sha256:c4a8ce00e82635f06648cc40d99f470be4e3ffeb84f9f7ae9d6a4f68ec6361e7 \
253 --hash=sha256:c5cd032bc7d0f03224ab55c925059d98e81795098d59bbd10f7d05c7ea9677ce \
254 --hash=sha256:fc0be372fe6da3c39d7093154ce029115a927bf287f34b4c615e2b3f8c23dfaa \
271 windows-curses==2.3.0 \
272 --hash=sha256:170c0d941c2e0cdf864e7f0441c1bdf0709232bf4aa7ce7f54d90fc76a4c0504 \
273 --hash=sha256:4d5fb991d1b90a41c2332f02241a1f84c8a1e6bc8f6e0d26f532d0da7a9f7b51 \
274 --hash=sha256:7a35eda4cb120b9e1a5ae795f3bc06c55b92c9d391baba6be1903285a05f3551 \
275 --hash=sha256:935be95cfdb9213f6f5d3d5bcd489960e3a8fbc9b574e7b2e8a3a3cc46efff49 \
276 --hash=sha256:a3a63a0597729e10f923724c2cf972a23ea677b400d2387dee1d668cf7116177 \
277 --hash=sha256:c860f596d28377e47f322b7382be4d3573fd76d1292234996bb7f72e0bc0ed0d \
278 --hash=sha256:cc5fa913780d60f4a40824d374a4f8ca45b4e205546e83a2d85147315a57457e \
279 --hash=sha256:d5cde8ec6d582aa77af791eca54f60858339fb3f391945f9cad11b1ab71062e3 \
280 --hash=sha256:e913dc121446d92b33fe4f5bcca26d3a34e4ad19f2af160370d57c3d1e93b4e1 \
281 --hash=sha256:fbc2131cec57e422c6660e6cdb3420aff5be5169b8e45bb7c471f884b0590a2b
255 282 # via -r contrib/packaging/requirements-windows.txt.in
256 283 wrapt==1.12.1 \
257 --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 \
284 --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7
258 285 # via vcrpy
259 286 yarl==1.6.3 \
260 287 --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \
261 288 --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \
262 289 --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \
263 290 --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \
264 291 --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \
265 292 --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \
266 293 --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \
267 294 --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \
268 295 --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \
269 296 --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \
270 297 --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \
271 298 --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \
272 299 --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \
273 300 --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \
274 301 --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \
275 302 --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \
276 303 --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \
277 304 --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \
278 305 --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \
279 306 --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \
280 307 --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \
281 308 --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \
282 309 --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \
283 310 --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \
284 311 --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \
285 312 --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \
286 313 --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \
287 314 --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \
288 315 --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \
289 316 --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \
290 317 --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \
291 318 --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \
292 319 --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \
293 320 --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \
294 321 --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \
295 322 --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \
296 --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 \
323 --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71
297 324 # via vcrpy
298 325 zipp==3.4.0 \
299 326 --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \
300 --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb \
327 --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb
301 328 # via importlib-metadata
@@ -1,49 +1,49 b''
1 1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile with python 3.7
3 3 # To update, run:
4 4 #
5 5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
6 6 #
7 7 docutils==0.16 \
8 8 --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
9 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
9 --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
10 10 # via -r contrib/packaging/requirements.txt.in
11 11 jinja2==2.11.2 \
12 12 --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
13 --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
13 --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035
14 14 # via -r contrib/packaging/requirements.txt.in
15 15 markupsafe==1.1.1 \
16 16 --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
17 17 --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
18 18 --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
19 19 --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
20 20 --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
21 21 --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
22 22 --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
23 23 --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
24 24 --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
25 25 --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
26 26 --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
27 27 --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
28 28 --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
29 29 --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
30 30 --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
31 31 --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
32 32 --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
33 33 --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
34 34 --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
35 35 --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
36 36 --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
37 37 --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
38 38 --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
39 39 --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
40 40 --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
41 41 --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
42 42 --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
43 43 --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
44 44 --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
45 45 --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
46 46 --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
47 47 --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
48 --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
48 --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be
49 49 # via jinja2
@@ -1,41 +1,47 b''
1 1 #!/bin/bash
2 2 set -eu
3 3
4 if [[ "$PHABRICATOR_TOKEN" == "NO-PHAB" ]]; then
5 echo 'Skipping Phabricator Step' >&2
6 exit 0
7 fi
8
4 9 revision_in_stack=`hg log \
5 10 --rev '.#stack and ::. and topic()' \
6 11 -T '\nONE-REV\n' \
7 12 | grep 'ONE-REV' | wc -l`
8 13 revision_on_phab=`hg log \
9 14 --rev '.#stack and ::. and topic() and desc("re:\nDifferential Revision: [^\n]+D\d+$")'\
10 15 -T '\nONE-REV\n' \
11 16 | grep 'ONE-REV' | wc -l`
12 17
13 18 if [[ $revision_in_stack -eq 0 ]]; then
14 19 echo "stack is empty" >&2
15 20 exit 0
16 21 fi
17 22
18 23 if [[ $revision_on_phab -eq 0 ]]; then
19 24 echo "no tracked diff in this stack" >&2
20 25 exit 0
21 26 fi
22 27
23 28 if [[ $revision_on_phab -lt $revision_in_stack ]]; then
24 29 echo "not all stack changesets (${revision_in_stack}) have matching Phabricator Diff (${revision_on_phab})" >&2
25 30 exit 2
26 31 fi
27 32
28 33 if [[ "$PHABRICATOR_TOKEN" == "" ]]; then
29 34 echo 'missing $PHABRICATOR_TOKEN variable' >&2
35 echo '(use PHABRICATOR_TOKEN="NO-PHAB" to disable this step)' >&2
30 36 exit 2
31 37 fi
32 38
33 39 hg \
34 40 --config extensions.phabricator= \
35 41 --config phabricator.url=https://phab.mercurial-scm.org/ \
36 42 --config phabricator.callsign=HG \
37 43 --config auth.phabricator.schemes=https \
38 44 --config auth.phabricator.prefix=phab.mercurial-scm.org \
39 45 --config auth.phabricator.phabtoken=$PHABRICATOR_TOKEN \
40 46 phabsend --rev '.#stack and ::. and topic()' \
41 47 "$@"
@@ -1,102 +1,141 b''
1 1 #!/usr/bin/env python3
2 2 from __future__ import absolute_import
3 3
4 4 import getopt
5 5 import sys
6 6
7 7 import hgdemandimport
8 8
9 9 hgdemandimport.enable()
10 10
11 11 from mercurial.i18n import _
12 12 from mercurial import (
13 13 context,
14 14 error,
15 15 fancyopts,
16 pycompat,
17 16 simplemerge,
18 17 ui as uimod,
18 util,
19 19 )
20 20 from mercurial.utils import procutil, stringutil
21 21
22 22 options = [
23 23 (b'L', b'label', [], _(b'labels to use on conflict markers')),
24 24 (b'a', b'text', None, _(b'treat all files as text')),
25 25 (b'p', b'print', None, _(b'print results instead of overwriting LOCAL')),
26 26 (b'', b'no-minimal', None, _(b'no effect (DEPRECATED)')),
27 27 (b'h', b'help', None, _(b'display help and exit')),
28 28 (b'q', b'quiet', None, _(b'suppress output')),
29 29 ]
30 30
31 31 usage = _(
32 32 b'''simplemerge [OPTS] LOCAL BASE OTHER
33 33
34 34 Simple three-way file merge utility with a minimal feature set.
35 35
36 36 Apply to LOCAL the changes necessary to go from BASE to OTHER.
37 37
38 38 By default, LOCAL is overwritten with the results of this operation.
39 39 '''
40 40 )
41 41
42 42
43 43 class ParseError(Exception):
44 44 """Exception raised on errors in parsing the command line."""
45 45
46 46
47 47 def showhelp():
48 48 procutil.stdout.write(usage)
49 49 procutil.stdout.write(b'\noptions:\n')
50 50
51 51 out_opts = []
52 52 for shortopt, longopt, default, desc in options:
53 53 out_opts.append(
54 54 (
55 55 b'%2s%s'
56 56 % (
57 57 shortopt and b'-%s' % shortopt,
58 58 longopt and b' --%s' % longopt,
59 59 ),
60 60 b'%s' % desc,
61 61 )
62 62 )
63 63 opts_len = max([len(opt[0]) for opt in out_opts])
64 64 for first, second in out_opts:
65 65 procutil.stdout.write(b' %-*s %s\n' % (opts_len, first, second))
66 66
67 67
68 def _verifytext(input, ui, quiet=False, allow_binary=False):
69 """verifies that text is non-binary (unless opts[text] is passed,
70 then we just warn)"""
71 if stringutil.binary(input.text()):
72 msg = _(b"%s looks like a binary file.") % input.fctx.path()
73 if not quiet:
74 ui.warn(_(b'warning: %s\n') % msg)
75 if not allow_binary:
76 sys.exit(1)
77
78
68 79 try:
69 80 for fp in (sys.stdin, procutil.stdout, sys.stderr):
70 81 procutil.setbinary(fp)
71 82
72 83 opts = {}
73 84 try:
74 85 bargv = [a.encode('utf8') for a in sys.argv[1:]]
75 86 args = fancyopts.fancyopts(bargv, options, opts)
76 87 except getopt.GetoptError as e:
77 88 raise ParseError(e)
78 89 if opts[b'help']:
79 90 showhelp()
80 91 sys.exit(0)
81 92 if len(args) != 3:
82 93 raise ParseError(_(b'wrong number of arguments').decode('utf8'))
94 mode = b'merge'
95 if len(opts[b'label']) > 2:
96 mode = b'merge3'
83 97 local, base, other = args
84 sys.exit(
85 simplemerge.simplemerge(
86 uimod.ui.load(),
87 context.arbitraryfilectx(local),
88 context.arbitraryfilectx(base),
89 context.arbitraryfilectx(other),
90 **pycompat.strkwargs(opts)
91 )
98 overrides = opts[b'label']
99 if len(overrides) > 3:
100 raise error.InputError(b'can only specify three labels.')
101 labels = [local, other, base]
102 labels[: len(overrides)] = overrides
103 local_input = simplemerge.MergeInput(
104 context.arbitraryfilectx(local), labels[0]
105 )
106 other_input = simplemerge.MergeInput(
107 context.arbitraryfilectx(other), labels[1]
108 )
109 base_input = simplemerge.MergeInput(
110 context.arbitraryfilectx(base), labels[2]
92 111 )
112
113 quiet = opts.get(b'quiet')
114 allow_binary = opts.get(b'text')
115 ui = uimod.ui.load()
116 _verifytext(local_input, ui, quiet=quiet, allow_binary=allow_binary)
117 _verifytext(base_input, ui, quiet=quiet, allow_binary=allow_binary)
118 _verifytext(other_input, ui, quiet=quiet, allow_binary=allow_binary)
119
120 merged_text, conflicts = simplemerge.simplemerge(
121 local_input,
122 base_input,
123 other_input,
124 mode,
125 allow_binary=allow_binary,
126 )
127 if opts.get(b'print'):
128 ui.fout.write(merged_text)
129 else:
130 util.writefile(local, merged_text)
131 sys.exit(1 if conflicts else 0)
93 132 except ParseError as e:
94 133 e = stringutil.forcebytestr(e)
95 134 procutil.stdout.write(b"%s: %s\n" % (sys.argv[0].encode('utf8'), e))
96 135 showhelp()
97 136 sys.exit(1)
98 137 except error.Abort as e:
99 138 procutil.stderr.write(b"abort: %s\n" % e)
100 139 sys.exit(255)
101 140 except KeyboardInterrupt:
102 141 sys.exit(255)
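
One subtlety in the rewritten script is the label handling: the defaults are the three paths in [local, other, base] order (other before base, matching conflict-marker order), -L/--label values replace them positionally, more than two labels switches to merge3-style markers, and a fourth label is rejected. A small self-contained check of that rule, with made-up values for illustration:

    local, base, other = b'local.txt', b'base.txt', b'other.txt'
    overrides = [b'mine', b'theirs']       # two -L options on the command line
    mode = 'merge3' if len(overrides) > 2 else 'merge'
    labels = [local, other, base]          # note: other comes before base
    labels[: len(overrides)] = overrides
    assert mode == 'merge' and labels == [b'mine', b'theirs', b'base.txt']
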
@@ -1,232 +1,241 b''
1 1 # blackbox.py - log repository events to a file for post-mortem debugging
2 2 #
3 3 # Copyright 2010 Nicolas Dumazet
4 4 # Copyright 2013 Facebook, Inc.
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """log repository events to a blackbox for debugging
10 10
11 11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 12 The events that get logged can be configured via the blackbox.track and
13 13 blackbox.ignore config keys.
14 14
15 15 Examples::
16 16
17 17 [blackbox]
18 18 track = *
19 19 ignore = pythonhook
20 20 # dirty is *EXPENSIVE* (slow);
21 21 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
22 22 dirty = True
23 23 # record the source of log messages
24 24 logsource = True
25 25
26 26 [blackbox]
27 27 track = command, commandfinish, commandexception, exthook, pythonhook
28 28
29 29 [blackbox]
30 30 track = incoming
31 31
32 32 [blackbox]
33 33 # limit the size of a log file
34 34 maxsize = 1.5 MB
35 35 # rotate up to N log files when the current one gets too big
36 36 maxfiles = 3
37 37
38 38 [blackbox]
39 # Include nanoseconds in log entries with %f (see Python function
39 # Include microseconds in log entries with %f (see Python function
40 40 # datetime.datetime.strftime)
41 41 date-format = %Y-%m-%d @ %H:%M:%S.%f
42 42
43 43 """
44 44
45 45 from __future__ import absolute_import
46 46
47 47 import re
48 48
49 49 from mercurial.i18n import _
50 50 from mercurial.node import hex
51 51
52 52 from mercurial import (
53 53 encoding,
54 54 loggingutil,
55 55 registrar,
56 56 )
57 57 from mercurial.utils import (
58 58 dateutil,
59 59 procutil,
60 60 )
61 61
62 62 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
63 63 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
64 64 # be specifying the version(s) of Mercurial they are tested with, or
65 65 # leave the attribute unspecified.
66 66 testedwith = b'ships-with-hg-core'
67 67
68 68 cmdtable = {}
69 69 command = registrar.command(cmdtable)
70 70
71 71 configtable = {}
72 72 configitem = registrar.configitem(configtable)
73 73
74 74 configitem(
75 75 b'blackbox',
76 76 b'dirty',
77 77 default=False,
78 78 )
79 79 configitem(
80 80 b'blackbox',
81 81 b'maxsize',
82 82 default=b'1 MB',
83 83 )
84 84 configitem(
85 85 b'blackbox',
86 86 b'logsource',
87 87 default=False,
88 88 )
89 89 configitem(
90 90 b'blackbox',
91 91 b'maxfiles',
92 92 default=7,
93 93 )
94 94 configitem(
95 95 b'blackbox',
96 96 b'track',
97 97 default=lambda: [b'*'],
98 98 )
99 99 configitem(
100 100 b'blackbox',
101 101 b'ignore',
102 102 default=lambda: [b'chgserver', b'cmdserver', b'extension'],
103 103 )
104 configitem(
105 b'blackbox',
106 b'date-format',
107 default=b'%Y/%m/%d %H:%M:%S',
108 )
104 configitem(b'blackbox', b'date-format', default=b'')
109 105
110 106 _lastlogger = loggingutil.proxylogger()
111 107
112 108
113 109 class blackboxlogger(object):
114 110 def __init__(self, ui, repo):
115 111 self._repo = repo
116 112 self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
117 113 self._ignoredevents = set(ui.configlist(b'blackbox', b'ignore'))
118 114 self._maxfiles = ui.configint(b'blackbox', b'maxfiles')
119 115 self._maxsize = ui.configbytes(b'blackbox', b'maxsize')
120 116 self._inlog = False
121 117
122 118 def tracked(self, event):
123 119 return (
124 120 b'*' in self._trackedevents and event not in self._ignoredevents
125 121 ) or event in self._trackedevents
126 122
127 123 def log(self, ui, event, msg, opts):
128 124 # self._log() -> ctx.dirty() may create new subrepo instance, which
129 125 # ui is derived from baseui. So the recursion guard in ui.log()
130 126 # doesn't work as it's local to the ui instance.
131 127 if self._inlog:
132 128 return
133 129 self._inlog = True
134 130 try:
135 131 self._log(ui, event, msg, opts)
136 132 finally:
137 133 self._inlog = False
138 134
139 135 def _log(self, ui, event, msg, opts):
140 136 default = ui.configdate(b'devel', b'default-date')
141 date = dateutil.datestr(default, ui.config(b'blackbox', b'date-format'))
137 dateformat = ui.config(b'blackbox', b'date-format')
138 if dateformat:
139 date = dateutil.datestr(default, dateformat)
140 else:
141 # We want to display milliseconds (more precision seems
142 # unnecessary). Since %.3f is not supported, use %f and truncate
143 # microseconds.
144 date = dateutil.datestr(default, b'%Y-%m-%d %H:%M:%S.%f')[:-3]
142 145 user = procutil.getuser()
143 146 pid = b'%d' % procutil.getpid()
144 147 changed = b''
145 148 ctx = self._repo[None]
146 149 parents = ctx.parents()
147 150 rev = b'+'.join([hex(p.node()) for p in parents])
148 151 if ui.configbool(b'blackbox', b'dirty') and ctx.dirty(
149 152 missing=True, merge=False, branch=False
150 153 ):
151 154 changed = b'+'
152 155 if ui.configbool(b'blackbox', b'logsource'):
153 156 src = b' [%s]' % event
154 157 else:
155 158 src = b''
156 159 try:
157 160 fmt = b'%s %s @%s%s (%s)%s> %s'
158 161 args = (date, user, rev, changed, pid, src, msg)
159 162 with loggingutil.openlogfile(
160 163 ui,
161 164 self._repo.vfs,
162 165 name=b'blackbox.log',
163 166 maxfiles=self._maxfiles,
164 167 maxsize=self._maxsize,
165 168 ) as fp:
166 169 fp.write(fmt % args)
167 170 except (IOError, OSError) as err:
168 171 # deactivate this to avoid failed logging again
169 172 self._trackedevents.clear()
170 173 ui.debug(
171 174 b'warning: cannot write to blackbox.log: %s\n'
172 175 % encoding.strtolocal(err.strerror)
173 176 )
174 177 return
175 178 _lastlogger.logger = self
176 179
177 180
178 181 def uipopulate(ui):
179 182 ui.setlogger(b'blackbox', _lastlogger)
180 183
181 184
182 185 def reposetup(ui, repo):
183 186 # During 'hg pull' a httppeer repo is created to represent the remote repo.
184 187 # It doesn't have a .hg directory to put a blackbox in, so we don't do
185 188 # the blackbox setup for it.
186 189 if not repo.local():
187 190 return
188 191
189 192 # Since blackbox.log is stored in the repo directory, the logger should be
190 193 # instantiated per repository.
191 194 logger = blackboxlogger(ui, repo)
192 195 ui.setlogger(b'blackbox', logger)
193 196
194 197 # Set _lastlogger even if ui.log is not called. This gives blackbox a
195 198 # fallback place to log
196 199 if _lastlogger.logger is None:
197 200 _lastlogger.logger = logger
198 201
199 202 repo._wlockfreeprefix.add(b'blackbox.log')
200 203
201 204
202 205 @command(
203 206 b'blackbox',
204 207 [
205 208 (b'l', b'limit', 10, _(b'the number of events to show')),
206 209 ],
207 210 _(b'hg blackbox [OPTION]...'),
208 211 helpcategory=command.CATEGORY_MAINTENANCE,
209 212 helpbasic=True,
210 213 )
211 214 def blackbox(ui, repo, *revs, **opts):
212 215 """view the recent repository events"""
213 216
214 217 if not repo.vfs.exists(b'blackbox.log'):
215 218 return
216 219
217 220 limit = opts.get('limit')
218 221 fp = repo.vfs(b'blackbox.log', b'r')
219 222 lines = fp.read().split(b'\n')
220 223
221 224 count = 0
222 225 output = []
223 226 for line in reversed(lines):
224 227 if count >= limit:
225 228 break
226 229
227 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
228 if re.match(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
230 # count the commands by matching lines like:
231 # 2013/01/23 19:13:36 root>
232 # 2013/01/23 19:13:36 root (1234)>
233 # 2013/01/23 19:13:36 root @0000000000000000000000000000000000000000 (1234)>
234 # 2013-01-23 19:13:36.000 root @0000000000000000000000000000000000000000 (1234)>
235 if re.match(
236 br'^\d{4}[-/]\d{2}[-/]\d{2} \d{2}:\d{2}:\d{2}(.\d*)? .*> .*', line
237 ):
229 238 count += 1
230 239 output.append(line)
231 240
232 241 ui.status(b'\n'.join(reversed(output)))
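
Two details of the new date handling are easy to miss: the default stamp is rendered with %f and then truncated from microseconds to milliseconds, and the widened regex in the blackbox command must keep matching both the legacy slash-separated entries and the new dotted ones. A quick standalone check of both behaviors, with sample lines taken from the comments above:

    import datetime
    import re

    stamp = datetime.datetime(2013, 1, 23, 19, 13, 36, 123456)
    # %f yields microseconds; dropping the last three digits leaves milliseconds.
    assert stamp.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] == '2013-01-23 19:13:36.123'

    pattern = br'^\d{4}[-/]\d{2}[-/]\d{2} \d{2}:\d{2}:\d{2}(.\d*)? .*> .*'
    node = b'0' * 40
    samples = [
        b'2013/01/23 19:13:36 root> serve',
        b'2013-01-23 19:13:36.000 root @' + node + b' (1234)> serve',
    ]
    assert all(re.match(pattern, line) for line in samples)
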
@@ -1,89 +1,89 b''
1 1 # commitextras.py
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''adds a new --extra flag to commit (ADVANCED)'''
9 9
10 10 from __future__ import absolute_import
11 11
12 12 import re
13 13
14 14 from mercurial.i18n import _
15 15 from mercurial import (
16 16 commands,
17 17 error,
18 18 extensions,
19 19 registrar,
20 20 util,
21 21 )
22 22
23 23 cmdtable = {}
24 24 command = registrar.command(cmdtable)
25 25 testedwith = b'ships-with-hg-core'
26 26
27 27 usedinternally = {
28 28 b'amend_source',
29 29 b'branch',
30 30 b'close',
31 31 b'histedit_source',
32 32 b'topic',
33 33 b'rebase_source',
34 34 b'intermediate-source',
35 35 b'__touch-noise__',
36 36 b'source',
37 37 b'transplant_source',
38 38 }
39 39
40 40
41 41 def extsetup(ui):
42 42 entry = extensions.wrapcommand(commands.table, b'commit', _commit)
43 43 options = entry[1]
44 44 options.append(
45 45 (
46 46 b'',
47 47 b'extra',
48 48 [],
49 49 _(b'set a changeset\'s extra values'),
50 50 _(b"KEY=VALUE"),
51 51 )
52 52 )
53 53
54 54
55 55 def _commit(orig, ui, repo, *pats, **opts):
56 56 if util.safehasattr(repo, 'unfiltered'):
57 57 repo = repo.unfiltered()
58 58
59 59 class repoextra(repo.__class__):
60 60 def commit(self, *innerpats, **inneropts):
61 61 extras = opts.get('extra')
62 62 for raw in extras:
63 63 if b'=' not in raw:
64 64 msg = _(
65 65 b"unable to parse '%s', should follow "
66 66 b"KEY=VALUE format"
67 67 )
68 raise error.Abort(msg % raw)
68 raise error.InputError(msg % raw)
69 69 k, v = raw.split(b'=', 1)
70 70 if not k:
71 71 msg = _(b"unable to parse '%s', keys can't be empty")
72 raise error.Abort(msg % raw)
72 raise error.InputError(msg % raw)
73 73 if re.search(br'[^\w-]', k):
74 74 msg = _(
75 75 b"keys can only contain ascii letters, digits,"
76 76 b" '_' and '-'"
77 77 )
78 raise error.Abort(msg)
78 raise error.InputError(msg)
79 79 if k in usedinternally:
80 80 msg = _(
81 81 b"key '%s' is used internally, can't be set "
82 82 b"manually"
83 83 )
84 raise error.Abort(msg % k)
84 raise error.InputError(msg % k)
85 85 inneropts['extra'][k] = v
86 86 return super(repoextra, self).commit(*innerpats, **inneropts)
87 87
88 88 repo.__class__ = repoextra
89 89 return orig(ui, repo, *pats, **opts)
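
The wrapped commit above now rejects malformed --extra arguments with
error.InputError instead of error.Abort. The validation rules, restated outside
Mercurial (RESERVED is a hypothetical subset of the internally-used keys)::

    import re

    RESERVED = {b'branch', b'close', b'source', b'amend_source'}  # illustrative

    def validate_extra(raw):
        # Mirrors the checks in _commit(): KEY=VALUE shape, non-empty key,
        # key charset [A-Za-z0-9_-], and no internally reserved keys.
        if b'=' not in raw:
            raise ValueError('should follow KEY=VALUE format: %r' % raw)
        k, v = raw.split(b'=', 1)
        if not k:
            raise ValueError('keys cannot be empty: %r' % raw)
        if re.search(br'[^\w-]', k):
            raise ValueError("keys may only contain ascii letters, digits, '_' and '-'")
        if k in RESERVED:
            raise ValueError('key %r is used internally' % k)
        return k, v

    print(validate_extra(b'reviewed-by=alice'))  # (b'reviewed-by', b'alice')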
@@ -1,732 +1,733 @@
1 1 # hg.py - hg backend for convert extension
2 2 #
3 3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # Notes for hg->hg conversion:
9 9 #
10 10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 11 # of commit messages, but new versions do. Changesets created by
12 12 # those older versions, then converted, may thus have different
13 13 # hashes for changesets that are otherwise identical.
14 14 #
15 15 # * Using "--config convert.hg.saverev=true" will make the source
16 16 # identifier to be stored in the converted revision. This will cause
17 17 # the converted revision to have a different identity than the
18 18 # source.
19 19 from __future__ import absolute_import
20 20
21 21 import os
22 22 import re
23 23 import time
24 24
25 25 from mercurial.i18n import _
26 26 from mercurial.pycompat import open
27 27 from mercurial.node import (
28 28 bin,
29 29 hex,
30 30 sha1nodeconstants,
31 31 )
32 32 from mercurial import (
33 33 bookmarks,
34 34 context,
35 35 error,
36 36 exchange,
37 37 hg,
38 38 lock as lockmod,
39 39 logcmdutil,
40 40 merge as mergemod,
41 mergestate,
41 42 phases,
42 43 pycompat,
43 44 util,
44 45 )
45 46 from mercurial.utils import dateutil
46 47
47 48 stringio = util.stringio
48 49
49 50 from . import common
50 51
51 52 mapfile = common.mapfile
52 53 NoRepo = common.NoRepo
53 54
54 55 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
55 56
56 57
57 58 class mercurial_sink(common.converter_sink):
58 59 def __init__(self, ui, repotype, path):
59 60 common.converter_sink.__init__(self, ui, repotype, path)
60 61 self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
61 62 self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
62 63 self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
63 64 self.lastbranch = None
64 65 if os.path.isdir(path) and len(os.listdir(path)) > 0:
65 66 try:
66 67 self.repo = hg.repository(self.ui, path)
67 68 if not self.repo.local():
68 69 raise NoRepo(
69 70 _(b'%s is not a local Mercurial repository') % path
70 71 )
71 72 except error.RepoError as err:
72 73 ui.traceback()
73 74 raise NoRepo(err.args[0])
74 75 else:
75 76 try:
76 77 ui.status(_(b'initializing destination %s repository\n') % path)
77 78 self.repo = hg.repository(self.ui, path, create=True)
78 79 if not self.repo.local():
79 80 raise NoRepo(
80 81 _(b'%s is not a local Mercurial repository') % path
81 82 )
82 83 self.created.append(path)
83 84 except error.RepoError:
84 85 ui.traceback()
85 86 raise NoRepo(
86 87 _(b"could not create hg repository %s as sink") % path
87 88 )
88 89 self.lock = None
89 90 self.wlock = None
90 91 self.filemapmode = False
91 92 self.subrevmaps = {}
92 93
93 94 def before(self):
94 95 self.ui.debug(b'run hg sink pre-conversion action\n')
95 96 self.wlock = self.repo.wlock()
96 97 self.lock = self.repo.lock()
97 98
98 99 def after(self):
99 100 self.ui.debug(b'run hg sink post-conversion action\n')
100 101 if self.lock:
101 102 self.lock.release()
102 103 if self.wlock:
103 104 self.wlock.release()
104 105
105 106 def revmapfile(self):
106 107 return self.repo.vfs.join(b"shamap")
107 108
108 109 def authorfile(self):
109 110 return self.repo.vfs.join(b"authormap")
110 111
111 112 def setbranch(self, branch, pbranches):
112 113 if not self.clonebranches:
113 114 return
114 115
115 116 setbranch = branch != self.lastbranch
116 117 self.lastbranch = branch
117 118 if not branch:
118 119 branch = b'default'
119 120 pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
120 121
121 122 branchpath = os.path.join(self.path, branch)
122 123 if setbranch:
123 124 self.after()
124 125 try:
125 126 self.repo = hg.repository(self.ui, branchpath)
126 127 except Exception:
127 128 self.repo = hg.repository(self.ui, branchpath, create=True)
128 129 self.before()
129 130
130 131 # pbranches may bring revisions from other branches (merge parents)
131 132 # Make sure we have them, or pull them.
132 133 missings = {}
133 134 for b in pbranches:
134 135 try:
135 136 self.repo.lookup(b[0])
136 137 except Exception:
137 138 missings.setdefault(b[1], []).append(b[0])
138 139
139 140 if missings:
140 141 self.after()
141 142 for pbranch, heads in sorted(pycompat.iteritems(missings)):
142 143 pbranchpath = os.path.join(self.path, pbranch)
143 144 prepo = hg.peer(self.ui, {}, pbranchpath)
144 145 self.ui.note(
145 146 _(b'pulling from %s into %s\n') % (pbranch, branch)
146 147 )
147 148 exchange.pull(
148 149 self.repo, prepo, heads=[prepo.lookup(h) for h in heads]
149 150 )
150 151 self.before()
151 152
152 153 def _rewritetags(self, source, revmap, data):
153 154 fp = stringio()
154 155 for line in data.splitlines():
155 156 s = line.split(b' ', 1)
156 157 if len(s) != 2:
157 158 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
158 159 fp.write(b'%s\n' % line) # Bogus, but keep for hash stability
159 160 continue
160 161 revid = revmap.get(source.lookuprev(s[0]))
161 162 if not revid:
162 163 if s[0] == sha1nodeconstants.nullhex:
163 164 revid = s[0]
164 165 else:
165 166 # missing, but keep for hash stability
166 167 self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
167 168 fp.write(b'%s\n' % line)
168 169 continue
169 170 fp.write(b'%s %s\n' % (revid, s[1]))
170 171 return fp.getvalue()
171 172
172 173 def _rewritesubstate(self, source, data):
173 174 fp = stringio()
174 175 for line in data.splitlines():
175 176 s = line.split(b' ', 1)
176 177 if len(s) != 2:
177 178 continue
178 179
179 180 revid = s[0]
180 181 subpath = s[1]
181 182 if revid != sha1nodeconstants.nullhex:
182 183 revmap = self.subrevmaps.get(subpath)
183 184 if revmap is None:
184 185 revmap = mapfile(
185 186 self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
186 187 )
187 188 self.subrevmaps[subpath] = revmap
188 189
189 190 # It is reasonable that one or more of the subrepos don't
190 191 # need to be converted, in which case they can be cloned
191 192 # into place instead of converted. Therefore, only warn
192 193 # once.
193 194 msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
194 195 if len(revmap) == 0:
195 196 sub = self.repo.wvfs.reljoin(subpath, b'.hg')
196 197
197 198 if self.repo.wvfs.exists(sub):
198 199 self.ui.warn(msg % subpath)
199 200
200 201 newid = revmap.get(revid)
201 202 if not newid:
202 203 if len(revmap) > 0:
203 204 self.ui.warn(
204 205 _(b"%s is missing from %s/.hg/shamap\n")
205 206 % (revid, subpath)
206 207 )
207 208 else:
208 209 revid = newid
209 210
210 211 fp.write(b'%s %s\n' % (revid, subpath))
211 212
212 213 return fp.getvalue()
213 214
214 215 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
215 216 """Calculates the files from p2 that we need to pull in when merging p1
216 217 and p2, given that the merge is coming from the given source.
217 218
218 219 This prevents us from losing files that only exist in the target p2 and
219 220 that don't come from the source repo (like if you're merging multiple
220 221 repositories together).
221 222 """
222 223 anc = [p1ctx.ancestor(p2ctx)]
223 224 # Calculate what files are coming from p2
224 225 # TODO: mresult.commitinfo might be able to get that info
225 226 mresult = mergemod.calculateupdates(
226 227 self.repo,
227 228 p1ctx,
228 229 p2ctx,
229 230 anc,
230 231 branchmerge=True,
231 232 force=True,
232 233 acceptremote=False,
233 234 followcopies=False,
234 235 )
235 236
236 237 for file, (action, info, msg) in mresult.filemap():
237 238 if source.targetfilebelongstosource(file):
238 239 # If the file belongs to the source repo, ignore the p2
239 240 # since it will be covered by the existing fileset.
240 241 continue
241 242
242 243 # If the file requires actual merging, abort. We don't have enough
243 244 # context to resolve merges correctly.
244 if action in [b'm', b'dm', b'cd', b'dc']:
245 if action in mergestate.CONVERT_MERGE_ACTIONS:
245 246 raise error.Abort(
246 247 _(
247 248 b"unable to convert merge commit "
248 249 b"since target parents do not merge cleanly (file "
249 250 b"%s, parents %s and %s)"
250 251 )
251 252 % (file, p1ctx, p2ctx)
252 253 )
253 elif action == b'k':
254 elif action == mergestate.ACTION_KEEP:
254 255 # 'keep' means nothing changed from p1
255 256 continue
256 257 else:
257 258 # Any other change means we want to take the p2 version
258 259 yield file
259 260
260 261 def putcommit(
261 262 self, files, copies, parents, commit, source, revmap, full, cleanp2
262 263 ):
263 264 files = dict(files)
264 265
265 266 def getfilectx(repo, memctx, f):
266 267 if p2ctx and f in p2files and f not in copies:
267 268 self.ui.debug(b'reusing %s from p2\n' % f)
268 269 try:
269 270 return p2ctx[f]
270 271 except error.ManifestLookupError:
271 272 # If the file doesn't exist in p2, then we're syncing a
272 273 # delete, so just return None.
273 274 return None
274 275 try:
275 276 v = files[f]
276 277 except KeyError:
277 278 return None
278 279 data, mode = source.getfile(f, v)
279 280 if data is None:
280 281 return None
281 282 if f == b'.hgtags':
282 283 data = self._rewritetags(source, revmap, data)
283 284 if f == b'.hgsubstate':
284 285 data = self._rewritesubstate(source, data)
285 286 return context.memfilectx(
286 287 self.repo,
287 288 memctx,
288 289 f,
289 290 data,
290 291 b'l' in mode,
291 292 b'x' in mode,
292 293 copies.get(f),
293 294 )
294 295
295 296 pl = []
296 297 for p in parents:
297 298 if p not in pl:
298 299 pl.append(p)
299 300 parents = pl
300 301 nparents = len(parents)
301 302 if self.filemapmode and nparents == 1:
302 303 m1node = self.repo.changelog.read(bin(parents[0]))[0]
303 304 parent = parents[0]
304 305
305 306 if len(parents) < 2:
306 307 parents.append(self.repo.nullid)
307 308 if len(parents) < 2:
308 309 parents.append(self.repo.nullid)
309 310 p2 = parents.pop(0)
310 311
311 312 text = commit.desc
312 313
313 314 sha1s = re.findall(sha1re, text)
314 315 for sha1 in sha1s:
315 316 oldrev = source.lookuprev(sha1)
316 317 newrev = revmap.get(oldrev)
317 318 if newrev is not None:
318 319 text = text.replace(sha1, newrev[: len(sha1)])
319 320
320 321 extra = commit.extra.copy()
321 322
322 323 sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
323 324 if sourcename:
324 325 extra[b'convert_source'] = sourcename
325 326
326 327 for label in (
327 328 b'source',
328 329 b'transplant_source',
329 330 b'rebase_source',
330 331 b'intermediate-source',
331 332 ):
332 333 node = extra.get(label)
333 334
334 335 if node is None:
335 336 continue
336 337
337 338 # Only transplant stores its reference in binary
338 339 if label == b'transplant_source':
339 340 node = hex(node)
340 341
341 342 newrev = revmap.get(node)
342 343 if newrev is not None:
343 344 if label == b'transplant_source':
344 345 newrev = bin(newrev)
345 346
346 347 extra[label] = newrev
347 348
348 349 if self.branchnames and commit.branch:
349 350 extra[b'branch'] = commit.branch
350 351 if commit.rev and commit.saverev:
351 352 extra[b'convert_revision'] = commit.rev
352 353
353 354 while parents:
354 355 p1 = p2
355 356 p2 = parents.pop(0)
356 357 p1ctx = self.repo[p1]
357 358 p2ctx = None
358 359 if p2 != self.repo.nullid:
359 360 p2ctx = self.repo[p2]
360 361 fileset = set(files)
361 362 if full:
362 363 fileset.update(self.repo[p1])
363 364 fileset.update(self.repo[p2])
364 365
365 366 if p2ctx:
366 367 p2files = set(cleanp2)
367 368 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
368 369 p2files.add(file)
369 370 fileset.add(file)
370 371
371 372 ctx = context.memctx(
372 373 self.repo,
373 374 (p1, p2),
374 375 text,
375 376 fileset,
376 377 getfilectx,
377 378 commit.author,
378 379 commit.date,
379 380 extra,
380 381 )
381 382
382 383 # We won't know if the conversion changes the node until after the
383 384 # commit, so copy the source's phase for now.
384 385 self.repo.ui.setconfig(
385 386 b'phases',
386 387 b'new-commit',
387 388 phases.phasenames[commit.phase],
388 389 b'convert',
389 390 )
390 391
391 392 with self.repo.transaction(b"convert") as tr:
392 393 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
393 394 origctx = commit.ctx
394 395 else:
395 396 origctx = None
396 397 node = hex(self.repo.commitctx(ctx, origctx=origctx))
397 398
398 399 # If the node value has changed, but the phase is lower than
399 400 # draft, set it back to draft since it hasn't been exposed
400 401 # anywhere.
401 402 if commit.rev != node:
402 403 ctx = self.repo[node]
403 404 if ctx.phase() < phases.draft:
404 405 phases.registernew(
405 406 self.repo, tr, phases.draft, [ctx.rev()]
406 407 )
407 408
408 409 text = b"(octopus merge fixup)\n"
409 410 p2 = node
410 411
411 412 if self.filemapmode and nparents == 1:
412 413 man = self.repo.manifestlog.getstorage(b'')
413 414 mnode = self.repo.changelog.read(bin(p2))[0]
414 415 closed = b'close' in commit.extra
415 416 if not closed and not man.cmp(m1node, man.revision(mnode)):
416 417 self.ui.status(_(b"filtering out empty revision\n"))
417 418 self.repo.rollback(force=True)
418 419 return parent
419 420 return p2
420 421
421 422 def puttags(self, tags):
422 423 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
423 424 tagparent = tagparent or self.repo.nullid
424 425
425 426 oldlines = set()
426 427 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
427 428 for h in heads:
428 429 if b'.hgtags' in self.repo[h]:
429 430 oldlines.update(
430 431 set(self.repo[h][b'.hgtags'].data().splitlines(True))
431 432 )
432 433 oldlines = sorted(list(oldlines))
433 434
434 435 newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
435 436 if newlines == oldlines:
436 437 return None, None
437 438
438 439 # if the old and new tags match, then there is nothing to update
439 440 oldtags = set()
440 441 newtags = set()
441 442 for line in oldlines:
442 443 s = line.strip().split(b' ', 1)
443 444 if len(s) != 2:
444 445 continue
445 446 oldtags.add(s[1])
446 447 for line in newlines:
447 448 s = line.strip().split(b' ', 1)
448 449 if len(s) != 2:
449 450 continue
450 451 if s[1] not in oldtags:
451 452 newtags.add(s[1].strip())
452 453
453 454 if not newtags:
454 455 return None, None
455 456
456 457 data = b"".join(newlines)
457 458
458 459 def getfilectx(repo, memctx, f):
459 460 return context.memfilectx(repo, memctx, f, data, False, False, None)
460 461
461 462 self.ui.status(_(b"updating tags\n"))
462 463 date = b"%d 0" % int(time.mktime(time.gmtime()))
463 464 extra = {b'branch': self.tagsbranch}
464 465 ctx = context.memctx(
465 466 self.repo,
466 467 (tagparent, None),
467 468 b"update tags",
468 469 [b".hgtags"],
469 470 getfilectx,
470 471 b"convert-repo",
471 472 date,
472 473 extra,
473 474 )
474 475 node = self.repo.commitctx(ctx)
475 476 return hex(node), hex(tagparent)
476 477
477 478 def setfilemapmode(self, active):
478 479 self.filemapmode = active
479 480
480 481 def putbookmarks(self, updatedbookmark):
481 482 if not len(updatedbookmark):
482 483 return
483 484 wlock = lock = tr = None
484 485 try:
485 486 wlock = self.repo.wlock()
486 487 lock = self.repo.lock()
487 488 tr = self.repo.transaction(b'bookmark')
488 489 self.ui.status(_(b"updating bookmarks\n"))
489 490 destmarks = self.repo._bookmarks
490 491 changes = [
491 492 (bookmark, bin(updatedbookmark[bookmark]))
492 493 for bookmark in updatedbookmark
493 494 ]
494 495 destmarks.applychanges(self.repo, tr, changes)
495 496 tr.close()
496 497 finally:
497 498 lockmod.release(lock, wlock, tr)
498 499
499 500 def hascommitfrommap(self, rev):
500 501 # the exact semantics of clonebranches are unclear, so we can't say no
501 502 return rev in self.repo or self.clonebranches
502 503
503 504 def hascommitforsplicemap(self, rev):
504 505 if rev not in self.repo and self.clonebranches:
505 506 raise error.Abort(
506 507 _(
507 508 b'revision %s not found in destination '
508 509 b'repository (lookups with clonebranches=true '
509 510 b'are not implemented)'
510 511 )
511 512 % rev
512 513 )
513 514 return rev in self.repo
514 515
515 516
516 517 class mercurial_source(common.converter_source):
517 518 def __init__(self, ui, repotype, path, revs=None):
518 519 common.converter_source.__init__(self, ui, repotype, path, revs)
519 520 self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
520 521 self.ignored = set()
521 522 self.saverev = ui.configbool(b'convert', b'hg.saverev')
522 523 try:
523 524 self.repo = hg.repository(self.ui, path)
524 525 # try to provoke an exception if this isn't really a hg
525 526 # repo, but some other bogus compatible-looking url
526 527 if not self.repo.local():
527 528 raise error.RepoError
528 529 except error.RepoError:
529 530 ui.traceback()
530 531 raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
531 532 self.lastrev = None
532 533 self.lastctx = None
533 534 self._changescache = None, None
534 535 self.convertfp = None
535 536 # Restrict converted revisions to startrev descendants
536 537 startnode = ui.config(b'convert', b'hg.startrev')
537 538 hgrevs = ui.config(b'convert', b'hg.revs')
538 539 if hgrevs is None:
539 540 if startnode is not None:
540 541 try:
541 542 startnode = self.repo.lookup(startnode)
542 543 except error.RepoError:
543 544 raise error.Abort(
544 545 _(b'%s is not a valid start revision') % startnode
545 546 )
546 547 startrev = self.repo.changelog.rev(startnode)
547 548 children = {startnode: 1}
548 549 for r in self.repo.changelog.descendants([startrev]):
549 550 children[self.repo.changelog.node(r)] = 1
550 551 self.keep = children.__contains__
551 552 else:
552 553 self.keep = util.always
553 554 if revs:
554 555 self._heads = [self.repo.lookup(r) for r in revs]
555 556 else:
556 557 self._heads = self.repo.heads()
557 558 else:
558 559 if revs or startnode is not None:
559 560 raise error.Abort(
560 561 _(
561 562 b'hg.revs cannot be combined with '
562 563 b'hg.startrev or --rev'
563 564 )
564 565 )
565 566 nodes = set()
566 567 parents = set()
567 568 for r in logcmdutil.revrange(self.repo, [hgrevs]):
568 569 ctx = self.repo[r]
569 570 nodes.add(ctx.node())
570 571 parents.update(p.node() for p in ctx.parents())
571 572 self.keep = nodes.__contains__
572 573 self._heads = nodes - parents
573 574
574 575 def _changectx(self, rev):
575 576 if self.lastrev != rev:
576 577 self.lastctx = self.repo[rev]
577 578 self.lastrev = rev
578 579 return self.lastctx
579 580
580 581 def _parents(self, ctx):
581 582 return [p for p in ctx.parents() if p and self.keep(p.node())]
582 583
583 584 def getheads(self):
584 585 return [hex(h) for h in self._heads if self.keep(h)]
585 586
586 587 def getfile(self, name, rev):
587 588 try:
588 589 fctx = self._changectx(rev)[name]
589 590 return fctx.data(), fctx.flags()
590 591 except error.LookupError:
591 592 return None, None
592 593
593 594 def _changedfiles(self, ctx1, ctx2):
594 595 ma, r = [], []
595 596 maappend = ma.append
596 597 rappend = r.append
597 598 d = ctx1.manifest().diff(ctx2.manifest())
598 599 for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
599 600 if node2 is None:
600 601 rappend(f)
601 602 else:
602 603 maappend(f)
603 604 return ma, r
604 605
605 606 def getchanges(self, rev, full):
606 607 ctx = self._changectx(rev)
607 608 parents = self._parents(ctx)
608 609 if full or not parents:
609 610 files = copyfiles = ctx.manifest()
610 611 if parents:
611 612 if self._changescache[0] == rev:
612 613 ma, r = self._changescache[1]
613 614 else:
614 615 ma, r = self._changedfiles(parents[0], ctx)
615 616 if not full:
616 617 files = ma + r
617 618 copyfiles = ma
618 619 # _getcopies() is also run for roots and before filtering so missing
619 620 # revlogs are detected early
620 621 copies = self._getcopies(ctx, parents, copyfiles)
621 622 cleanp2 = set()
622 623 if len(parents) == 2:
623 624 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
624 625 for f, value in pycompat.iteritems(d):
625 626 if value is None:
626 627 cleanp2.add(f)
627 628 changes = [(f, rev) for f in files if f not in self.ignored]
628 629 changes.sort()
629 630 return changes, copies, cleanp2
630 631
631 632 def _getcopies(self, ctx, parents, files):
632 633 copies = {}
633 634 for name in files:
634 635 if name in self.ignored:
635 636 continue
636 637 try:
637 638 copysource = ctx.filectx(name).copysource()
638 639 if copysource in self.ignored:
639 640 continue
640 641 # Ignore copy sources not in parent revisions
641 642 if not any(copysource in p for p in parents):
642 643 continue
643 644 copies[name] = copysource
644 645 except TypeError:
645 646 pass
646 647 except error.LookupError as e:
647 648 if not self.ignoreerrors:
648 649 raise
649 650 self.ignored.add(name)
650 651 self.ui.warn(_(b'ignoring: %s\n') % e)
651 652 return copies
652 653
653 654 def getcommit(self, rev):
654 655 ctx = self._changectx(rev)
655 656 _parents = self._parents(ctx)
656 657 parents = [p.hex() for p in _parents]
657 658 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
658 659 crev = rev
659 660
660 661 return common.commit(
661 662 author=ctx.user(),
662 663 date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
663 664 desc=ctx.description(),
664 665 rev=crev,
665 666 parents=parents,
666 667 optparents=optparents,
667 668 branch=ctx.branch(),
668 669 extra=ctx.extra(),
669 670 sortkey=ctx.rev(),
670 671 saverev=self.saverev,
671 672 phase=ctx.phase(),
672 673 ctx=ctx,
673 674 )
674 675
675 676 def numcommits(self):
676 677 return len(self.repo)
677 678
678 679 def gettags(self):
679 680 # This will get written to .hgtags, filter non global tags out.
680 681 tags = [
681 682 t
682 683 for t in self.repo.tagslist()
683 684 if self.repo.tagtype(t[0]) == b'global'
684 685 ]
685 686 return {name: hex(node) for name, node in tags if self.keep(node)}
686 687
687 688 def getchangedfiles(self, rev, i):
688 689 ctx = self._changectx(rev)
689 690 parents = self._parents(ctx)
690 691 if not parents and i is None:
691 692 i = 0
692 693 ma, r = ctx.manifest().keys(), []
693 694 else:
694 695 i = i or 0
695 696 ma, r = self._changedfiles(parents[i], ctx)
696 697 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
697 698
698 699 if i == 0:
699 700 self._changescache = (rev, (ma, r))
700 701
701 702 return ma + r
702 703
703 704 def converted(self, rev, destrev):
704 705 if self.convertfp is None:
705 706 self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
706 707 self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
707 708 self.convertfp.flush()
708 709
709 710 def before(self):
710 711 self.ui.debug(b'run hg source pre-conversion action\n')
711 712
712 713 def after(self):
713 714 self.ui.debug(b'run hg source post-conversion action\n')
714 715
715 716 def hasnativeorder(self):
716 717 return True
717 718
718 719 def hasnativeclose(self):
719 720 return True
720 721
721 722 def lookuprev(self, rev):
722 723 try:
723 724 return hex(self.repo.lookup(rev))
724 725 except (error.RepoError, error.LookupError):
725 726 return None
726 727
727 728 def getbookmarks(self):
728 729 return bookmarks.listbookmarks(self.repo)
729 730
730 731 def checkrevformat(self, revstr, mapname=b'splicemap'):
731 732 """Mercurial, revision string is a 40 byte hex"""
732 733 self.checkhexformat(revstr, mapname)
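
The change to _calculatemergedfiles() above replaces hard-coded action bytes
with named constants from Mercurial's mergestate module. A standalone sketch of
the same three-way dispatch, using stand-in values because the real constants
live inside Mercurial (historically they were short byte strings)::

    # Stand-ins mirroring the mergestate names used in the diff; the values
    # are shown as the historical byte codes purely for illustration.
    ACTION_KEEP = b'k'
    CONVERT_MERGE_ACTIONS = (b'm', b'dm', b'cd', b'dc')

    def classify(action):
        """Return what the converter would do for one merge action."""
        if action in CONVERT_MERGE_ACTIONS:
            return 'abort: target parents do not merge cleanly'
        elif action == ACTION_KEEP:
            return 'skip: nothing changed from p1'
        return 'yield: take the p2 version of the file'

    for action in (b'm', b'k', b'g'):
        print(action, '->', classify(action))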
@@ -1,971 +1,958 @@
1 1 # fix - rewrite file content in changesets and working copy
2 2 #
3 3 # Copyright 2018 Google LLC.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8 8
9 9 Provides a command that runs configured tools on the contents of modified files,
10 10 writing back any fixes to the working copy or replacing changesets.
11 11
12 12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 13 formatting fixes to modified lines in C++ code::
14 14
15 15 [fix]
16 16 clang-format:command=clang-format --assume-filename={rootpath}
17 17 clang-format:linerange=--lines={first}:{last}
18 18 clang-format:pattern=set:**.cpp or **.hpp
19 19
20 20 The :command suboption forms the first part of the shell command that will be
21 21 used to fix a file. The content of the file is passed on standard input, and the
22 22 fixed file content is expected on standard output. Any output on standard error
23 23 will be displayed as a warning. If the exit status is not zero, the file will
24 24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 25 status but no standard error output. Some values may be substituted into the
26 26 command::
27 27
28 28 {rootpath} The path of the file being fixed, relative to the repo root
29 29 {basename} The name of the file being fixed, without the directory path
30 30
31 31 If the :linerange suboption is set, the tool will only be run if there are
32 32 changed lines in a file. The value of this suboption is appended to the shell
33 33 command once for every range of changed lines in the file. Some values may be
34 34 substituted into the command::
35 35
36 36 {first} The 1-based line number of the first line in the modified range
37 37 {last} The 1-based line number of the last line in the modified range
38 38
39 39 Deleted sections of a file will be ignored by :linerange, because there is no
40 40 corresponding line range in the version being fixed.
41 41
42 42 By default, tools that set :linerange will only be executed if there is at least
43 43 one changed line range. This is meant to prevent accidents like running a code
44 44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 46 to false.
47 47
48 48 The :pattern suboption determines which files will be passed through each
49 49 configured tool. See :hg:`help patterns` for possible values. However, all
50 50 patterns are relative to the repo root, even if that text says they are relative
51 51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 52 intersection of these patterns is used.
53 53
54 54 There is also a configurable limit for the maximum size of file that will be
55 55 processed by :hg:`fix`::
56 56
57 57 [fix]
58 58 maxfilesize = 2MB
59 59
60 60 Normally, execution of configured tools will continue after a failure (indicated
61 61 by a non-zero exit status). It can also be configured to abort after the first
62 62 such failure, so that no files will be affected if any tool fails. This abort
63 63 will also cause :hg:`fix` to exit with a non-zero status::
64 64
65 65 [fix]
66 66 failure = abort
67 67
68 68 When multiple tools are configured to affect a file, they execute in an order
69 69 defined by the :priority suboption. The priority suboption has a default value
70 70 of zero for each tool. Tools are executed in order of descending priority. The
71 71 execution order of tools with equal priority is unspecified. For example, you
72 72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 73 in a text file by ensuring that 'sort' runs before 'head'::
74 74
75 75 [fix]
76 76 sort:command = sort -n
77 77 head:command = head -n 10
78 78 sort:pattern = numbers.txt
79 79 head:pattern = numbers.txt
80 80 sort:priority = 2
81 81 head:priority = 1
82 82
83 83 To account for changes made by each tool, the line numbers used for incremental
84 84 formatting are recomputed before executing the next tool. So, each tool may see
85 85 different values for the arguments added by the :linerange suboption.
86 86
87 87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 88 content. The metadata must be placed before the file content on stdout,
89 89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 91 is expected to produce this metadata encoding if and only if the :metadata
92 92 suboption is true::
93 93
94 94 [fix]
95 95 tool:command = tool --prepend-json-metadata
96 96 tool:metadata = true
97 97
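A hypothetical tool honoring that contract could look like this (a toy example,
not shipped with Mercurial)::

    #!/usr/bin/env python3
    # toy fixer: upper-cases its input and reports how many bytes changed
    import json
    import sys

    data = sys.stdin.buffer.read()
    fixed = data.upper()
    meta = {'changed': sum(a != b for a, b in zip(data, fixed))}
    sys.stdout.buffer.write(json.dumps(meta).encode('utf-8'))
    sys.stdout.buffer.write(b'\0')
    sys.stdout.buffer.write(fixed)
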
98 98 The metadata values are passed to hooks, which can be used to print summaries or
99 99 perform other post-fixing work. The supported hooks are::
100 100
101 101 "postfixfile"
102 102 Run once for each file in each revision where any fixer tools made changes
103 103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 105 tools that affected the file. Fixer tools that didn't affect the file have a
106 106 value of None. Only fixer tools that executed are present in the metadata.
107 107
108 108 "postfix"
109 109 Run once after all files and revisions have been handled. Provides
110 110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 113 mapping fixer tool names to lists of metadata values returned from
114 114 executions that modified a file. This aggregates the same metadata
115 115 previously passed to the "postfixfile" hook.
116 116
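As an example, a post-run summary hook might be wired up like this (the hook
script and module names are hypothetical; this assumes Mercurial's usual
python hook calling convention)::

    [hooks]
    postfix = python:myhooks.summarize

with a handler sketch such as::

    def summarize(ui, repo, hooktype, **kwargs):
        replacements = kwargs.get('replacements') or {}
        ui.status(b'fix replaced %d revision(s)\n' % len(replacements))
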
117 117 Fixer tools are run in the repository's root directory. This allows them to read
118 118 configuration files from the working copy, or even write to the working copy.
119 119 The working copy is not updated to match the revision being fixed. In fact,
120 120 several revisions may be fixed in parallel. Writes to the working copy are not
121 121 amended into the revision being fixed; fixer tools should always write fixed
122 122 file content back to stdout as documented above.
123 123 """
124 124
125 125 from __future__ import absolute_import
126 126
127 127 import collections
128 128 import itertools
129 129 import os
130 130 import re
131 131 import subprocess
132 132
133 133 from mercurial.i18n import _
134 134 from mercurial.node import (
135 135 nullid,
136 136 nullrev,
137 137 wdirrev,
138 138 )
139 139
140 140 from mercurial.utils import procutil
141 141
142 142 from mercurial import (
143 143 cmdutil,
144 144 context,
145 145 copies,
146 146 error,
147 147 logcmdutil,
148 148 match as matchmod,
149 149 mdiff,
150 150 merge,
151 151 mergestate as mergestatemod,
152 obsolete,
153 152 pycompat,
154 153 registrar,
155 154 rewriteutil,
156 155 scmutil,
157 156 util,
158 157 worker,
159 158 )
160 159
161 160 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
162 161 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
163 162 # be specifying the version(s) of Mercurial they are tested with, or
164 163 # leave the attribute unspecified.
165 164 testedwith = b'ships-with-hg-core'
166 165
167 166 cmdtable = {}
168 167 command = registrar.command(cmdtable)
169 168
170 169 configtable = {}
171 170 configitem = registrar.configitem(configtable)
172 171
173 172 # Register the suboptions allowed for each configured fixer, and default values.
174 173 FIXER_ATTRS = {
175 174 b'command': None,
176 175 b'linerange': None,
177 176 b'pattern': None,
178 177 b'priority': 0,
179 178 b'metadata': False,
180 179 b'skipclean': True,
181 180 b'enabled': True,
182 181 }
183 182
184 183 for key, default in FIXER_ATTRS.items():
185 184 configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)
186 185
187 186 # A good default size allows most source code files to be fixed, but avoids
188 187 # letting fixer tools choke on huge inputs, which could be surprising to the
189 188 # user.
190 189 configitem(b'fix', b'maxfilesize', default=b'2MB')
191 190
192 191 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
193 192 # This helps users do shell scripts that stop when a fixer tool signals a
194 193 # problem.
195 194 configitem(b'fix', b'failure', default=b'continue')
196 195
197 196
198 197 def checktoolfailureaction(ui, message, hint=None):
199 198 """Abort with 'message' if fix.failure=abort"""
200 199 action = ui.config(b'fix', b'failure')
201 200 if action not in (b'continue', b'abort'):
202 201 raise error.Abort(
203 202 _(b'unknown fix.failure action: %s') % (action,),
204 203 hint=_(b'use "continue" or "abort"'),
205 204 )
206 205 if action == b'abort':
207 206 raise error.Abort(message, hint=hint)
208 207
209 208
210 209 allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
211 210 baseopt = (
212 211 b'',
213 212 b'base',
214 213 [],
215 214 _(
216 215 b'revisions to diff against (overrides automatic '
217 216 b'selection, and applies to every revision being '
218 217 b'fixed)'
219 218 ),
220 219 _(b'REV'),
221 220 )
222 221 revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
223 222 sourceopt = (
224 223 b's',
225 224 b'source',
226 225 [],
227 226 _(b'fix the specified revisions and their descendants'),
228 227 _(b'REV'),
229 228 )
230 229 wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
231 230 wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
232 231 usage = _(b'[OPTION]... [FILE]...')
233 232
234 233
235 234 @command(
236 235 b'fix',
237 236 [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
238 237 usage,
239 238 helpcategory=command.CATEGORY_FILE_CONTENTS,
240 239 )
241 240 def fix(ui, repo, *pats, **opts):
242 241 """rewrite file content in changesets or working directory
243 242
244 243 Runs any configured tools to fix the content of files. Only affects files
245 244 with changes, unless file arguments are provided. Only affects changed lines
246 245 of files, unless the --whole flag is used. Some tools may always affect the
247 246 whole file regardless of --whole.
248 247
249 248 If --working-dir is used, files with uncommitted changes in the working copy
250 249 will be fixed. Note that no backups are made.
251 250
252 251 If revisions are specified with --source, those revisions and their
253 252 descendants will be checked, and they may be replaced with new revisions
254 253 that have fixed file content. By automatically including the descendants,
255 254 no merging, rebasing, or evolution will be required. If an ancestor of the
256 255 working copy is included, then the working copy itself will also be fixed,
257 256 and the working copy will be updated to the fixed parent.
258 257
259 258 When determining what lines of each file to fix at each revision, the whole
260 259 set of revisions being fixed is considered, so that fixes to earlier
261 260 revisions are not forgotten in later ones. The --base flag can be used to
262 261 override this default behavior, though it is not usually desirable to do so.
263 262 """
264 263 opts = pycompat.byteskwargs(opts)
265 264 cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
266 265 cmdutil.check_incompatible_arguments(
267 266 opts, b'working_dir', [b'all', b'source']
268 267 )
269 268
270 269 with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
271 270 revstofix = getrevstofix(ui, repo, opts)
272 271 basectxs = getbasectxs(repo, opts, revstofix)
273 272 workqueue, numitems = getworkqueue(
274 273 ui, repo, pats, opts, revstofix, basectxs
275 274 )
276 275 basepaths = getbasepaths(repo, opts, workqueue, basectxs)
277 276 fixers = getfixers(ui)
278 277
279 278 # Rather than letting each worker independently fetch the files
280 279 # (which also would add complications for shared/keepalive
281 280 # connections), prefetch them all first.
282 281 _prefetchfiles(repo, workqueue, basepaths)
283 282
284 283 # There are no data dependencies between the workers fixing each file
285 284 # revision, so we can use all available parallelism.
286 285 def getfixes(items):
287 286 for srcrev, path, dstrevs in items:
288 287 ctx = repo[srcrev]
289 288 olddata = ctx[path].data()
290 289 metadata, newdata = fixfile(
291 290 ui,
292 291 repo,
293 292 opts,
294 293 fixers,
295 294 ctx,
296 295 path,
297 296 basepaths,
298 297 basectxs[srcrev],
299 298 )
300 299 # We ungroup the work items now, because the code that consumes
301 300 # these results has to handle each dstrev separately, and in
302 301 # topological order. Because these are handled in topological
303 302 # order, it's important that we pass around references to
304 303 # "newdata" instead of copying it. Otherwise, we would be
305 304 # keeping more copies of file content in memory at a time than
306 305 # if we hadn't bothered to group/deduplicate the work items.
307 306 data = newdata if newdata != olddata else None
308 307 for dstrev in dstrevs:
309 308 yield (dstrev, path, metadata, data)
310 309
311 310 results = worker.worker(
312 311 ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
313 312 )
314 313
315 314 # We have to hold on to the data for each successor revision in memory
316 315 # until all its parents are committed. We ensure this by committing and
317 316 # freeing memory for the revisions in some topological order. This
318 317 # leaves a little bit of memory efficiency on the table, but also makes
319 318 # the tests deterministic. It might also be considered a feature since
320 319 # it makes the results more easily reproducible.
321 320 filedata = collections.defaultdict(dict)
322 321 aggregatemetadata = collections.defaultdict(list)
323 322 replacements = {}
324 323 wdirwritten = False
325 324 commitorder = sorted(revstofix, reverse=True)
326 325 with ui.makeprogress(
327 326 topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
328 327 ) as progress:
329 328 for rev, path, filerevmetadata, newdata in results:
330 329 progress.increment(item=path)
331 330 for fixername, fixermetadata in filerevmetadata.items():
332 331 aggregatemetadata[fixername].append(fixermetadata)
333 332 if newdata is not None:
334 333 filedata[rev][path] = newdata
335 334 hookargs = {
336 335 b'rev': rev,
337 336 b'path': path,
338 337 b'metadata': filerevmetadata,
339 338 }
340 339 repo.hook(
341 340 b'postfixfile',
342 341 throw=False,
343 342 **pycompat.strkwargs(hookargs)
344 343 )
345 344 numitems[rev] -= 1
346 345 # Apply the fixes for this and any other revisions that are
347 346 # ready and sitting at the front of the queue. Using a loop here
348 347 # prevents the queue from being blocked by the first revision to
349 348 # be ready out of order.
350 349 while commitorder and not numitems[commitorder[-1]]:
351 350 rev = commitorder.pop()
352 351 ctx = repo[rev]
353 352 if rev == wdirrev:
354 353 writeworkingdir(repo, ctx, filedata[rev], replacements)
355 354 wdirwritten = bool(filedata[rev])
356 355 else:
357 356 replacerev(ui, repo, ctx, filedata[rev], replacements)
358 357 del filedata[rev]
359 358
360 359 cleanup(repo, replacements, wdirwritten)
361 360 hookargs = {
362 361 b'replacements': replacements,
363 362 b'wdirwritten': wdirwritten,
364 363 b'metadata': aggregatemetadata,
365 364 }
366 365 repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
367 366
368 367
369 368 def cleanup(repo, replacements, wdirwritten):
370 369 """Calls scmutil.cleanupnodes() with the given replacements.
371 370
372 371 "replacements" is a dict from nodeid to nodeid, with one key and one value
373 372 for every revision that was affected by fixing. This is slightly different
374 373 from cleanupnodes().
375 374
376 375 "wdirwritten" is a bool which tells whether the working copy was affected by
377 376 fixing, since it has no entry in "replacements".
378 377
379 378 Useful as a hook point for extending "hg fix" with output summarizing the
380 379 effects of the command, though we choose not to output anything here.
381 380 """
382 381 replacements = {
383 382 prec: [succ] for prec, succ in pycompat.iteritems(replacements)
384 383 }
385 384 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
386 385
387 386
388 387 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
389 388 """Constructs a list of files to fix and which revisions each fix applies to
390 389
391 390 To avoid duplicating work, there is usually only one work item for each file
392 391 revision that might need to be fixed. There can be multiple work items per
393 392 file revision if the same file needs to be fixed in multiple changesets with
394 393 different baserevs. Each work item also contains a list of changesets where
395 394 the file's data should be replaced with the fixed data. The work items for
396 395 earlier changesets come earlier in the work queue, to improve pipelining by
397 396 allowing the first changeset to be replaced while fixes are still being
398 397 computed for later changesets.
399 398
400 399 Also returned is a map from changesets to the count of work items that might
401 400 affect each changeset. This is used later to count when all of a changeset's
402 401 work items have been finished, without having to inspect the remaining work
403 402 queue in each worker subprocess.
404 403
405 404 The example work item (1, "foo/bar.txt", (1, 2, 3)) means that the data of
406 405 bar.txt should be read from revision 1, then fixed, and written back to
407 406 revisions 1, 2 and 3. Revision 1 is called the "srcrev" and the list of
408 407 revisions is called the "dstrevs". In practice the srcrev is always one of
409 408 the dstrevs, and we make that choice when constructing the work item so that
410 409 the choice can't be made inconsistently later on. The dstrevs should all
411 410 have the same file revision for the given path, so the choice of srcrev is
412 411 arbitrary. The wdirrev can be a dstrev and a srcrev.
413 412 """
414 413 dstrevmap = collections.defaultdict(list)
415 414 numitems = collections.defaultdict(int)
416 415 maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
417 416 for rev in sorted(revstofix):
418 417 fixctx = repo[rev]
419 418 match = scmutil.match(fixctx, pats, opts)
420 419 for path in sorted(
421 420 pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
422 421 ):
423 422 fctx = fixctx[path]
424 423 if fctx.islink():
425 424 continue
426 425 if fctx.size() > maxfilesize:
427 426 ui.warn(
428 427 _(b'ignoring file larger than %s: %s\n')
429 428 % (util.bytecount(maxfilesize), path)
430 429 )
431 430 continue
432 431 baserevs = tuple(ctx.rev() for ctx in basectxs[rev])
433 432 dstrevmap[(fctx.filerev(), baserevs, path)].append(rev)
434 433 numitems[rev] += 1
435 434 workqueue = [
436 435 (min(dstrevs), path, dstrevs)
437 436 for (_filerev, _baserevs, path), dstrevs in dstrevmap.items()
438 437 ]
439 438 # Move work items for earlier changesets to the front of the queue, so we
440 439 # might be able to replace those changesets (in topological order) while
441 440 # we're still processing later work items. Note the min() in the previous
442 441 # expression, which means we don't need a custom comparator here. The path
443 442 # is also important in the sort order to make the output order stable. There
444 443 # are some situations where this doesn't help much, but some situations
445 444 # where it lets us buffer O(1) files instead of O(n) files.
446 445 workqueue.sort()
447 446 return workqueue, numitems
448 447
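# Hypothetical illustration of the grouping performed by getworkqueue() above
# (not called anywhere): keying work items by (filerev, path) collapses
# duplicate fixes across changesets that share the same file revision.
def _toy_workqueue_demo():
    import collections

    # pretend revs 1 and 2 share bar.txt's file revision 'f0'
    filerev = {(1, 'bar.txt'): 'f0', (2, 'bar.txt'): 'f0', (2, 'baz.txt'): 'f1'}
    dstrevmap = collections.defaultdict(list)
    for (rev, path), fr in sorted(filerev.items()):
        dstrevmap[(fr, path)].append(rev)
    # -> [(1, 'bar.txt', [1, 2]), (2, 'baz.txt', [2])]
    return sorted(
        (min(dstrevs), path, dstrevs)
        for (_fr, path), dstrevs in dstrevmap.items()
    )
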
449 448
450 449 def getrevstofix(ui, repo, opts):
451 450 """Returns the set of revision numbers that should be fixed"""
452 451 if opts[b'all']:
453 452 revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
454 453 elif opts[b'source']:
455 454 source_revs = logcmdutil.revrange(repo, opts[b'source'])
456 455 revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
457 456 if wdirrev in source_revs:
458 457 # `wdir()::` is currently empty, so manually add wdir
459 458 revs.add(wdirrev)
460 459 if repo[b'.'].rev() in revs:
461 460 revs.add(wdirrev)
462 461 else:
463 462 revs = set(logcmdutil.revrange(repo, opts[b'rev']))
464 463 if opts.get(b'working_dir'):
465 464 revs.add(wdirrev)
466 for rev in revs:
467 checkfixablectx(ui, repo, repo[rev])
468 465 # Allow fixing only wdir() even if there's an unfinished operation
469 466 if not (len(revs) == 1 and wdirrev in revs):
470 467 cmdutil.checkunfinished(repo)
471 468 rewriteutil.precheck(repo, revs, b'fix')
472 469 if (
473 470 wdirrev in revs
474 471 and mergestatemod.mergestate.read(repo).unresolvedcount()
475 472 ):
476 473 raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
477 474 if not revs:
478 475 raise error.Abort(
479 476 b'no changesets specified', hint=b'use --source or --working-dir'
480 477 )
481 478 return revs
482 479
483 480
484 def checkfixablectx(ui, repo, ctx):
485 """Aborts if the revision shouldn't be replaced with a fixed one."""
486 if ctx.obsolete():
487 # It would be better to actually check if the revision has a successor.
488 if not obsolete.isenabled(repo, obsolete.allowdivergenceopt):
489 raise error.Abort(
490 b'fixing obsolete revision could cause divergence'
491 )
492
493
494 481 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
495 482 """Returns the set of files that should be fixed in a context
496 483
497 484 The result depends on the base contexts; we include any file that has
498 485 changed relative to any of the base contexts. Base contexts should be
499 486 ancestors of the context being fixed.
500 487 """
501 488 files = set()
502 489 for basectx in basectxs:
503 490 stat = basectx.status(
504 491 fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
505 492 )
506 493 files.update(
507 494 set(
508 495 itertools.chain(
509 496 stat.added, stat.modified, stat.clean, stat.unknown
510 497 )
511 498 )
512 499 )
513 500 return files
514 501
515 502
516 503 def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
517 504 """Returns the set of line ranges that should be fixed in a file
518 505
519 506 Of the form [(10, 20), (30, 40)].
520 507
521 508 This depends on the given base contexts; we must consider lines that have
522 509 changed versus any of the base contexts, and whether the file has been
523 510 renamed versus any of them.
524 511
525 512 Another way to understand this is that we exclude line ranges that are
526 513 common to the file in all base contexts.
527 514 """
528 515 if opts.get(b'whole'):
529 516 # Return a range containing all lines. Rely on the diff implementation's
530 517 # idea of how many lines are in the file, instead of reimplementing it.
531 518 return difflineranges(b'', content2)
532 519
533 520 rangeslist = []
534 521 for basectx in basectxs:
535 522 basepath = basepaths.get((basectx.rev(), fixctx.rev(), path), path)
536 523
537 524 if basepath in basectx:
538 525 content1 = basectx[basepath].data()
539 526 else:
540 527 content1 = b''
541 528 rangeslist.extend(difflineranges(content1, content2))
542 529 return unionranges(rangeslist)
543 530
544 531
545 532 def getbasepaths(repo, opts, workqueue, basectxs):
546 533 if opts.get(b'whole'):
547 534 # Base paths will never be fetched for line range determination.
548 535 return {}
549 536
550 537 basepaths = {}
551 538 for srcrev, path, _dstrevs in workqueue:
552 539 fixctx = repo[srcrev]
553 540 for basectx in basectxs[srcrev]:
554 541 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
555 542 if basepath in basectx:
556 543 basepaths[(basectx.rev(), fixctx.rev(), path)] = basepath
557 544 return basepaths
558 545
559 546
560 547 def unionranges(rangeslist):
561 548 """Return the union of some closed intervals
562 549
563 550 >>> unionranges([])
564 551 []
565 552 >>> unionranges([(1, 100)])
566 553 [(1, 100)]
567 554 >>> unionranges([(1, 100), (1, 100)])
568 555 [(1, 100)]
569 556 >>> unionranges([(1, 100), (2, 100)])
570 557 [(1, 100)]
571 558 >>> unionranges([(1, 99), (1, 100)])
572 559 [(1, 100)]
573 560 >>> unionranges([(1, 100), (40, 60)])
574 561 [(1, 100)]
575 562 >>> unionranges([(1, 49), (50, 100)])
576 563 [(1, 100)]
577 564 >>> unionranges([(1, 48), (50, 100)])
578 565 [(1, 48), (50, 100)]
579 566 >>> unionranges([(1, 2), (3, 4), (5, 6)])
580 567 [(1, 6)]
581 568 """
582 569 rangeslist = sorted(set(rangeslist))
583 570 unioned = []
584 571 if rangeslist:
585 572 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
586 573 for a, b in rangeslist:
587 574 c, d = unioned[-1]
588 575 if a > d + 1:
589 576 unioned.append((a, b))
590 577 else:
591 578 unioned[-1] = (c, max(b, d))
592 579 return unioned
593 580
594 581
595 582 def difflineranges(content1, content2):
596 583 """Return list of line number ranges in content2 that differ from content1.
597 584
598 585 Line numbers are 1-based. The numbers are the first and last line contained
599 586 in the range. Single-line ranges have the same line number for the first and
600 587 last line. Excludes any empty ranges that result from lines that are only
601 588 present in content1. Relies on mdiff's idea of where the line endings are in
602 589 the string.
603 590
604 591 >>> from mercurial import pycompat
605 592 >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
606 593 >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
607 594 >>> difflineranges2(b'', b'')
608 595 []
609 596 >>> difflineranges2(b'a', b'')
610 597 []
611 598 >>> difflineranges2(b'', b'A')
612 599 [(1, 1)]
613 600 >>> difflineranges2(b'a', b'a')
614 601 []
615 602 >>> difflineranges2(b'a', b'A')
616 603 [(1, 1)]
617 604 >>> difflineranges2(b'ab', b'')
618 605 []
619 606 >>> difflineranges2(b'', b'AB')
620 607 [(1, 2)]
621 608 >>> difflineranges2(b'abc', b'ac')
622 609 []
623 610 >>> difflineranges2(b'ab', b'aCb')
624 611 [(2, 2)]
625 612 >>> difflineranges2(b'abc', b'aBc')
626 613 [(2, 2)]
627 614 >>> difflineranges2(b'ab', b'AB')
628 615 [(1, 2)]
629 616 >>> difflineranges2(b'abcde', b'aBcDe')
630 617 [(2, 2), (4, 4)]
631 618 >>> difflineranges2(b'abcde', b'aBCDe')
632 619 [(2, 4)]
633 620 """
634 621 ranges = []
635 622 for lines, kind in mdiff.allblocks(content1, content2):
636 623 firstline, lastline = lines[2:4]
637 624 if kind == b'!' and firstline != lastline:
638 625 ranges.append((firstline + 1, lastline))
639 626 return ranges
640 627
641 628
642 629 def getbasectxs(repo, opts, revstofix):
643 630 """Returns a map of the base contexts for each revision
644 631
645 632 The base contexts determine which lines are considered modified when we
646 633 attempt to fix just the modified lines in a file. It also determines which
647 634 files we attempt to fix, so it is important to compute this even when
648 635 --whole is used.
649 636 """
650 637 # The --base flag overrides the usual logic, and we give every revision
651 638 # exactly the set of baserevs that the user specified.
652 639 if opts.get(b'base'):
653 640 baserevs = set(logcmdutil.revrange(repo, opts.get(b'base')))
654 641 if not baserevs:
655 642 baserevs = {nullrev}
656 643 basectxs = {repo[rev] for rev in baserevs}
657 644 return {rev: basectxs for rev in revstofix}
658 645
659 646 # Proceed in topological order so that we can easily determine each
660 647 # revision's baserevs by looking at its parents and their baserevs.
661 648 basectxs = collections.defaultdict(set)
662 649 for rev in sorted(revstofix):
663 650 ctx = repo[rev]
664 651 for pctx in ctx.parents():
665 652 if pctx.rev() in basectxs:
666 653 basectxs[rev].update(basectxs[pctx.rev()])
667 654 else:
668 655 basectxs[rev].add(pctx)
669 656 return basectxs
670 657
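# Hypothetical illustration of the propagation done by getbasectxs() above
# (not called anywhere): walking revisions in topological order lets each rev
# inherit baserevs from parents that are themselves being fixed, and use the
# parent itself otherwise.
def _toy_basectxs_demo():
    import collections

    parentmap = {2: [1], 3: [2], 5: [4]}  # rev -> parents; 1 and 4 not fixed
    basesets = collections.defaultdict(set)
    for rev in sorted(parentmap):
        for p in parentmap[rev]:
            if p in basesets:
                basesets[rev] |= basesets[p]
            else:
                basesets[rev].add(p)
    # -> {2: {1}, 3: {1}, 5: {4}}
    return dict(basesets)
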
671 658
672 659 def _prefetchfiles(repo, workqueue, basepaths):
673 660 toprefetch = set()
674 661
675 662 # Prefetch the files that will be fixed.
676 663 for srcrev, path, _dstrevs in workqueue:
677 664 if srcrev == wdirrev:
678 665 continue
679 666 toprefetch.add((srcrev, path))
680 667
681 668 # Prefetch the base contents for lineranges().
682 669 for (baserev, fixrev, path), basepath in basepaths.items():
683 670 toprefetch.add((baserev, basepath))
684 671
685 672 if toprefetch:
686 673 scmutil.prefetchfiles(
687 674 repo,
688 675 [
689 676 (rev, scmutil.matchfiles(repo, [path]))
690 677 for rev, path in toprefetch
691 678 ],
692 679 )
693 680
694 681
695 682 def fixfile(ui, repo, opts, fixers, fixctx, path, basepaths, basectxs):
696 683 """Run any configured fixers that should affect the file in this context
697 684
698 685 Returns the file content that results from applying the fixers in some order
699 686 starting with the file's content in the fixctx. Fixers that support line
700 687 ranges will affect lines that have changed relative to any of the basectxs
701 688 (i.e. they will only avoid lines that are common to all basectxs).
702 689
703 690 A fixer tool's stdout will become the file's new content if and only if it
704 691 exits with code zero. The fixer tool's working directory is the repository's
705 692 root.
706 693 """
707 694 metadata = {}
708 695 newdata = fixctx[path].data()
709 696 for fixername, fixer in pycompat.iteritems(fixers):
710 697 if fixer.affects(opts, fixctx, path):
711 698 ranges = lineranges(
712 699 opts, path, basepaths, basectxs, fixctx, newdata
713 700 )
714 701 command = fixer.command(ui, path, ranges)
715 702 if command is None:
716 703 continue
717 704 ui.debug(b'subprocess: %s\n' % (command,))
718 705 proc = subprocess.Popen(
719 706 procutil.tonativestr(command),
720 707 shell=True,
721 708 cwd=procutil.tonativestr(repo.root),
722 709 stdin=subprocess.PIPE,
723 710 stdout=subprocess.PIPE,
724 711 stderr=subprocess.PIPE,
725 712 )
726 713 stdout, stderr = proc.communicate(newdata)
727 714 if stderr:
728 715 showstderr(ui, fixctx.rev(), fixername, stderr)
729 716 newerdata = stdout
730 717 if fixer.shouldoutputmetadata():
731 718 try:
732 719 metadatajson, newerdata = stdout.split(b'\0', 1)
733 720 metadata[fixername] = pycompat.json_loads(metadatajson)
734 721 except ValueError:
735 722 ui.warn(
736 723 _(b'ignored invalid output from fixer tool: %s\n')
737 724 % (fixername,)
738 725 )
739 726 continue
740 727 else:
741 728 metadata[fixername] = None
742 729 if proc.returncode == 0:
743 730 newdata = newerdata
744 731 else:
745 732 if not stderr:
746 733 message = _(b'exited with status %d\n') % (proc.returncode,)
747 734 showstderr(ui, fixctx.rev(), fixername, message)
748 735 checktoolfailureaction(
749 736 ui,
750 737 _(b'no fixes will be applied'),
751 738 hint=_(
752 739 b'use --config fix.failure=continue to apply any '
753 740 b'successful fixes anyway'
754 741 ),
755 742 )
756 743 return metadata, newdata
757 744
758 745
759 746 def showstderr(ui, rev, fixername, stderr):
760 747 """Writes the lines of the stderr string as warnings on the ui
761 748
762 749 Uses the revision number and fixername to give more context to each line of
763 750 the error message. Doesn't include file names, since those take up a lot of
764 751 space and would tend to be included in the error message if they were
765 752 relevant.
766 753 """
767 754 for line in re.split(b'[\r\n]+', stderr):
768 755 if line:
769 756 ui.warn(b'[')
770 757 if rev is None:
771 758 ui.warn(_(b'wdir'), label=b'evolve.rev')
772 759 else:
773 760 ui.warn(b'%d' % rev, label=b'evolve.rev')
774 761 ui.warn(b'] %s: %s\n' % (fixername, line))
775 762
776 763
777 764 def writeworkingdir(repo, ctx, filedata, replacements):
778 765 """Write new content to the working copy and check out the new p1 if any
779 766
780 767 We check out a new revision if and only if we fixed something in both the
781 768 working directory and its parent revision. This avoids the need for a full
782 769 update/merge, and means that the working directory simply isn't affected
783 770 unless the --working-dir flag is given.
784 771
785 772 Directly updates the dirstate for the affected files.
786 773 """
787 774 for path, data in pycompat.iteritems(filedata):
788 775 fctx = ctx[path]
789 776 fctx.write(data, fctx.flags())
790 777
791 778 oldp1 = repo.dirstate.p1()
792 779 newp1 = replacements.get(oldp1, oldp1)
793 780 if newp1 != oldp1:
794 781 assert repo.dirstate.p2() == nullid
795 782 with repo.dirstate.parentchange():
796 783 scmutil.movedirstate(repo, repo[newp1])
797 784
798 785
799 786 def replacerev(ui, repo, ctx, filedata, replacements):
800 787 """Commit a new revision like the given one, but with file content changes
801 788
802 789 "ctx" is the original revision to be replaced by a modified one.
803 790
804 791 "filedata" is a dict that maps paths to their new file content. All other
805 792 paths will be recreated from the original revision without changes.
806 793 "filedata" may contain paths that didn't exist in the original revision;
807 794 they will be added.
808 795
809 796 "replacements" is a dict that maps a single node to a single node, and it is
810 797 updated to indicate the original revision is replaced by the newly created
811 798 one. No entry is added if the replacement's node already exists.
812 799
813 800 The new revision has the same parents as the old one, unless those parents
814 801 have already been replaced, in which case those replacements are the parents
815 802 of this new revision. Thus, if revisions are replaced in topological order,
816 803 there is no need to rebase them into the original topology later.
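
    For example (hypothetical nodes): if A and its child B are both fixed,
    A is replaced by A' first; when B is processed, its new parent is then
    found via replacements, so B' is committed directly on top of A'.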
817 804 """
818 805
819 806 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
820 807 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
821 808 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
822 809 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
823 810
824 811 # We don't want to create a revision that has no changes from the original,
825 812 # but we should if the original revision's parent has been replaced.
826 813 # Otherwise, we would produce an orphan that needs no actual human
827 814 # intervention to evolve. We can't rely on commit() to avoid creating the
828 815 # unneeded revision because the extra field added below produces a new hash
829 816 # regardless of file content changes.
830 817 if (
831 818 not filedata
832 819 and p1ctx.node() not in replacements
833 820 and p2ctx.node() not in replacements
834 821 ):
835 822 return
836 823
837 824 extra = ctx.extra().copy()
838 825 extra[b'fix_source'] = ctx.hex()
839 826
840 827 wctx = context.overlayworkingctx(repo)
841 828 wctx.setbase(repo[newp1node])
842 829 merge.revert_to(ctx, wc=wctx)
843 830 copies.graftcopies(wctx, ctx, ctx.p1())
844 831
845 832 for path in filedata.keys():
846 833 fctx = ctx[path]
847 834 copysource = fctx.copysource()
848 835 wctx.write(path, filedata[path], flags=fctx.flags())
849 836 if copysource:
850 837 wctx.markcopied(path, copysource)
851 838
852 839 desc = rewriteutil.update_hash_refs(
853 840 repo,
854 841 ctx.description(),
855 842 {oldnode: [newnode] for oldnode, newnode in replacements.items()},
856 843 )
857 844
858 845 memctx = wctx.tomemctx(
859 846 text=desc,
860 847 branch=ctx.branch(),
861 848 extra=extra,
862 849 date=ctx.date(),
863 850 parents=(newp1node, newp2node),
864 851 user=ctx.user(),
865 852 )
866 853
867 854 sucnode = memctx.commit()
868 855 prenode = ctx.node()
869 856 if prenode == sucnode:
870 857 ui.debug(b'node %s already existed\n' % (ctx.hex()))
871 858 else:
872 859 replacements[ctx.node()] = sucnode
873 860
874 861
875 862 def getfixers(ui):
876 863 """Returns a map of configured fixer tools indexed by their names
877 864
878 865 Each value is a Fixer object with methods that implement the behavior of the
879 866 fixer's config suboptions. Does not validate the config values.
880 867 """
881 868 fixers = {}
882 869 for name in fixernames(ui):
883 870 enabled = ui.configbool(b'fix', name + b':enabled')
884 871 command = ui.config(b'fix', name + b':command')
885 872 pattern = ui.config(b'fix', name + b':pattern')
886 873 linerange = ui.config(b'fix', name + b':linerange')
887 874 priority = ui.configint(b'fix', name + b':priority')
888 875 metadata = ui.configbool(b'fix', name + b':metadata')
889 876 skipclean = ui.configbool(b'fix', name + b':skipclean')
890 877 # Don't use a fixer if it has no pattern configured. It would be
891 878 # dangerous to let it affect all files. It would be pointless to let it
892 879 # affect no files. There is no reasonable subset of files to use as the
893 880 # default.
894 881 if command is None:
895 882 ui.warn(
896 883 _(b'fixer tool has no command configuration: %s\n') % (name,)
897 884 )
898 885 elif pattern is None:
899 886 ui.warn(
900 887 _(b'fixer tool has no pattern configuration: %s\n') % (name,)
901 888 )
902 889 elif not enabled:
903 890 ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
904 891 else:
905 892 fixers[name] = Fixer(
906 893 command, pattern, linerange, priority, metadata, skipclean
907 894 )
908 895 return collections.OrderedDict(
909 896 sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
910 897 )
911 898
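# A minimal sketch of the kind of [fix] configuration the suboptions above
# describe; the tool name and flags are illustrative, not mandated here:
#
#   [fix]
#   clang-format:command = clang-format --assume-filename={rootpath}
#   clang-format:linerange = --lines={first}:{last}
#   clang-format:pattern = set:**.cpp or **.hpp
#   clang-format:priority = 10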
912 899
913 900 def fixernames(ui):
914 901 """Returns the names of [fix] config options that have suboptions"""
915 902 names = set()
916 903 for k, v in ui.configitems(b'fix'):
917 904 if b':' in k:
918 905 names.add(k.split(b':', 1)[0])
919 906 return names
920 907
921 908
922 909 class Fixer(object):
923 910 """Wraps the raw config values for a fixer with methods"""
924 911
925 912 def __init__(
926 913 self, command, pattern, linerange, priority, metadata, skipclean
927 914 ):
928 915 self._command = command
929 916 self._pattern = pattern
930 917 self._linerange = linerange
931 918 self._priority = priority
932 919 self._metadata = metadata
933 920 self._skipclean = skipclean
934 921
935 922 def affects(self, opts, fixctx, path):
936 923 """Should this fixer run on the file at the given path and context?"""
937 924 repo = fixctx.repo()
938 925 matcher = matchmod.match(
939 926 repo.root, repo.root, [self._pattern], ctx=fixctx
940 927 )
941 928 return matcher(path)
942 929
943 930 def shouldoutputmetadata(self):
944 931 """Should the stdout of this fixer start with JSON and a null byte?"""
945 932 return self._metadata
946 933
947 934 def command(self, ui, path, ranges):
948 935 """A shell command to use to invoke this fixer on the given file/lines
949 936
950 937 May return None if there is no appropriate command to run for the given
951 938 parameters.
952 939 """
953 940 expand = cmdutil.rendercommandtemplate
954 941 parts = [
955 942 expand(
956 943 ui,
957 944 self._command,
958 945 {b'rootpath': path, b'basename': os.path.basename(path)},
959 946 )
960 947 ]
961 948 if self._linerange:
962 949 if self._skipclean and not ranges:
963 950 # No line ranges to fix, so don't run the fixer.
964 951 return None
965 952 for first, last in ranges:
966 953 parts.append(
967 954 expand(
968 955 ui, self._linerange, {b'first': first, b'last': last}
969 956 )
970 957 )
971 958 return b' '.join(parts)
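# For example (values hypothetical): with a command template of
# "clang-format --assume-filename={rootpath}", a linerange template of
# "--lines={first}:{last}", and ranges [(1, 5), (10, 12)], command()
# returns:
#   clang-format --assume-filename=foo/bar.cpp --lines=1:5 --lines=10:12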
@@ -1,343 +1,344 b''
1 1 """grant Mercurial the ability to operate on Git repositories. (EXPERIMENTAL)
2 2
3 3 This is currently super experimental. It probably will consume your
4 4 firstborn a la Rumpelstiltskin, etc.
5 5 """
6 6
7 7 from __future__ import absolute_import
8 8
9 9 import os
10 10
11 11 from mercurial.i18n import _
12 12
13 13 from mercurial import (
14 14 commands,
15 15 error,
16 16 extensions,
17 17 localrepo,
18 18 pycompat,
19 19 registrar,
20 20 scmutil,
21 21 store,
22 22 util,
23 23 )
24 24
25 25 from . import (
26 26 dirstate,
27 27 gitlog,
28 28 gitutil,
29 29 index,
30 30 )
31 31
32 32 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
33 33 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
34 34 # be specifying the version(s) of Mercurial they are tested with, or
35 35 # leave the attribute unspecified.
36 36 testedwith = b'ships-with-hg-core'
37 37
38 38 configtable = {}
39 39 configitem = registrar.configitem(configtable)
40 40 # git.log-index-cache-miss: internal knob for testing
41 41 configitem(
42 42 b"git",
43 43 b"log-index-cache-miss",
44 44 default=False,
45 45 )
46 46
47 47 getversion = gitutil.pygit2_version
48 48
49 49
50 50 # TODO: extract an interface for this in core
51 51 class gitstore(object): # store.basicstore):
52 52 def __init__(self, path, vfstype):
53 53 self.vfs = vfstype(path)
54 self.opener = self.vfs
54 55 self.path = self.vfs.base
55 56 self.createmode = store._calcmode(self.vfs)
56 57 # above lines should go away in favor of:
57 58 # super(gitstore, self).__init__(path, vfstype)
58 59
59 60 self.git = gitutil.get_pygit2().Repository(
60 61 os.path.normpath(os.path.join(path, b'..', b'.git'))
61 62 )
62 63 self._progress_factory = lambda *args, **kwargs: None
63 64 self._logfn = lambda x: None
64 65
65 66 @util.propertycache
66 67 def _db(self):
67 68 # We lazy-create the database because we want to thread a
68 69 # progress callback down to the indexing process if it's
69 70 # required, and we don't have a ui handle in makestore().
70 71 return index.get_index(self.git, self._logfn, self._progress_factory)
71 72
72 73 def join(self, f):
73 74 """Fake store.join method for git repositories.
74 75
75 76 For the most part, store.join is used for @storecache
76 77 decorators to invalidate caches when various files
77 78 change. We'll map the ones we care about, and ignore the rest.
78 79 """
79 80 if f in (b'00changelog.i', b'00manifest.i'):
80 81 # This is close enough: in order for the changelog cache
81 82 # to be invalidated, HEAD will have to change.
82 83 return os.path.join(self.path, b'HEAD')
83 84 elif f == b'lock':
84 85 # TODO: we probably want to map this to a git lock, I
85 86 # suspect index.lock. We should figure out what the
86 87 # most-alike file is in git-land. For now we're risking
87 88 # bad concurrency errors if another git client is used.
88 89 return os.path.join(self.path, b'hgit-bogus-lock')
89 90 elif f in (b'obsstore', b'phaseroots', b'narrowspec', b'bookmarks'):
90 91 return os.path.join(self.path, b'..', b'.hg', f)
91 92 raise NotImplementedError(b'Need to pick file for %s.' % f)
92 93
93 94 def changelog(self, trypending, concurrencychecker):
94 95 # TODO we don't have a plan for trypending in hg's git support yet
95 96 return gitlog.changelog(self.git, self._db)
96 97
97 98 def manifestlog(self, repo, storenarrowmatch):
98 99 # TODO handle storenarrowmatch and figure out if we need the repo arg
99 100 return gitlog.manifestlog(self.git, self._db)
100 101
101 102 def invalidatecaches(self):
102 103 pass
103 104
104 105 def write(self, tr=None):
105 106 # normally this handles things like fncache writes, which we don't have
106 107 pass
107 108
108 109
109 110 def _makestore(orig, requirements, storebasepath, vfstype):
110 111 if b'git' in requirements:
111 112 if not os.path.exists(os.path.join(storebasepath, b'..', b'.git')):
112 113 raise error.Abort(
113 114 _(
114 115 b'repository specified git format in '
115 116 b'.hg/requires but has no .git directory'
116 117 )
117 118 )
118 119 # Check for presence of pygit2 only here. The assumption is that we'll
119 120 # run this code iff we'll later need pygit2.
120 121 if gitutil.get_pygit2() is None:
121 122 raise error.Abort(
122 123 _(
123 124 b'the git extension requires the Python '
124 125 b'pygit2 library to be installed'
125 126 )
126 127 )
127 128
128 129 return gitstore(storebasepath, vfstype)
129 130 return orig(requirements, storebasepath, vfstype)
130 131
131 132
132 133 class gitfilestorage(object):
133 134 def file(self, path):
134 135 if path[0:1] == b'/':
135 136 path = path[1:]
136 137 return gitlog.filelog(self.store.git, self.store._db, path)
137 138
138 139
139 140 def _makefilestorage(orig, requirements, features, **kwargs):
140 141 store = kwargs['store']
141 142 if isinstance(store, gitstore):
142 143 return gitfilestorage
143 144 return orig(requirements, features, **kwargs)
144 145
145 146
146 147 def _setupdothg(ui, path):
147 148 dothg = os.path.join(path, b'.hg')
148 149 if os.path.exists(dothg):
149 150 ui.warn(_(b'git repo already initialized for hg\n'))
150 151 else:
151 152 os.mkdir(os.path.join(path, b'.hg'))
152 153 # TODO is it ok to extend .git/info/exclude like this?
153 154 with open(
154 155 os.path.join(path, b'.git', b'info', b'exclude'), 'ab'
155 156 ) as exclude:
156 157 exclude.write(b'\n.hg\n')
157 158 with open(os.path.join(dothg, b'requires'), 'wb') as f:
158 159 f.write(b'git\n')
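    # After this runs, .hg/requires contains just "git", and
    # .git/info/exclude has gained a ".hg" entry, so git ignores the hg
    # metadata directory.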
159 160
160 161
161 162 _BMS_PREFIX = 'refs/heads/'
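# That is, Mercurial bookmarks map onto git branch heads: bookmark "foo"
# corresponds to the git reference "refs/heads/foo".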
162 163
163 164
164 165 class gitbmstore(object):
165 166 def __init__(self, gitrepo):
166 167 self.gitrepo = gitrepo
167 168 self._aclean = True
168 169 self._active = gitrepo.references['HEAD'] # git head, not mark
169 170
170 171 def __contains__(self, name):
171 172 return (
172 173 _BMS_PREFIX + pycompat.fsdecode(name)
173 174 ) in self.gitrepo.references
174 175
175 176 def __iter__(self):
176 177 for r in self.gitrepo.listall_references():
177 178 if r.startswith(_BMS_PREFIX):
178 179 yield pycompat.fsencode(r[len(_BMS_PREFIX) :])
179 180
180 181 def __getitem__(self, k):
181 182 return (
182 183 self.gitrepo.references[_BMS_PREFIX + pycompat.fsdecode(k)]
183 184 .peel()
184 185 .id.raw
185 186 )
186 187
187 188 def get(self, k, default=None):
188 189 try:
189 190 if k in self:
190 191 return self[k]
191 192 return default
192 193 except gitutil.get_pygit2().InvalidSpecError:
193 194 return default
194 195
195 196 @property
196 197 def active(self):
197 198 h = self.gitrepo.references['HEAD']
198 199 if not isinstance(h.target, str) or not h.target.startswith(
199 200 _BMS_PREFIX
200 201 ):
201 202 return None
202 203 return pycompat.fsencode(h.target[len(_BMS_PREFIX) :])
203 204
204 205 @active.setter
205 206 def active(self, mark):
206 207 githead = (_BMS_PREFIX + mark) if mark is not None else None
207 208 if githead is not None and githead not in self.gitrepo.references:
208 209 raise AssertionError(b'bookmark %s does not exist!' % mark)
209 210
210 211 self._active = githead
211 212 self._aclean = False
212 213
213 214 def _writeactive(self):
214 215 if self._aclean:
215 216 return
216 217 self.gitrepo.references.create('HEAD', self._active, True)
217 218 self._aclean = True
218 219
219 220 def names(self, node):
220 221 r = []
221 222 for ref in self.gitrepo.listall_references():
222 223 if not ref.startswith(_BMS_PREFIX):
223 224 continue
224 225 if self.gitrepo.references[ref].peel().id.raw != node:
225 226 continue
226 227 r.append(pycompat.fsencode(ref[len(_BMS_PREFIX) :]))
227 228 return r
228 229
229 230 # Cleanup opportunity: this is *identical* to core's bookmarks store.
230 231 def expandname(self, bname):
231 232 if bname == b'.':
232 233 if self.active:
233 234 return self.active
234 235 raise error.RepoLookupError(_(b"no active bookmark"))
235 236 return bname
236 237
237 238 def applychanges(self, repo, tr, changes):
238 239 """Apply a list of changes to bookmarks"""
239 240 # TODO: this should respect transactions, but that's going to
240 241 # require enlarging the gitbmstore to know how to do in-memory
241 242 # temporary writes and read those back prior to transaction
242 243 # finalization.
243 244 for name, node in changes:
244 245 if node is None:
245 246 self.gitrepo.references.delete(
246 247 _BMS_PREFIX + pycompat.fsdecode(name)
247 248 )
248 249 else:
249 250 self.gitrepo.references.create(
250 251 _BMS_PREFIX + pycompat.fsdecode(name),
251 252 gitutil.togitnode(node),
252 253 force=True,
253 254 )
254 255
255 256 def checkconflict(self, mark, force=False, target=None):
256 257 githead = _BMS_PREFIX + mark
257 258 cur = self.gitrepo.references['HEAD']
258 259 if githead in self.gitrepo.references and not force:
259 260 if target:
260 261 if self.gitrepo.references[githead] == target and target == cur:
261 262 # re-activating a bookmark
262 263 return []
263 264 # moving a bookmark - forward?
264 265 raise NotImplementedError
265 266 raise error.Abort(
266 267 _(b"bookmark '%s' already exists (use -f to force)") % mark
267 268 )
268 269 if len(mark) > 3 and not force:
269 270 try:
270 271 shadowhash = scmutil.isrevsymbol(self._repo, mark)
271 272 except error.LookupError: # ambiguous identifier
272 273 shadowhash = False
273 274 if shadowhash:
274 275 self._repo.ui.warn(
275 276 _(
276 277 b"bookmark %s matches a changeset hash\n"
277 278 b"(did you leave a -r out of an 'hg bookmark' "
278 279 b"command?)\n"
279 280 )
280 281 % mark
281 282 )
282 283 return []
283 284
284 285
285 286 def init(orig, ui, dest=b'.', **opts):
286 287 if opts.get('git', False):
287 288 path = util.abspath(dest)
288 289 # TODO: walk up looking for the git repo
289 290 _setupdothg(ui, path)
290 291 return 0
291 292 return orig(ui, dest=dest, **opts)
292 293
293 294
294 295 def reposetup(ui, repo):
295 296 if repo.local() and isinstance(repo.store, gitstore):
296 297 orig = repo.__class__
297 298 repo.store._progress_factory = repo.ui.makeprogress
298 299 if ui.configbool(b'git', b'log-index-cache-miss'):
299 300 repo.store._logfn = repo.ui.warn
300 301
301 302 class gitlocalrepo(orig):
302 303 def _makedirstate(self):
303 304 # TODO narrow support here
304 305 return dirstate.gitdirstate(
305 306 self.ui, self.vfs.base, self.store.git
306 307 )
307 308
308 309 def commit(self, *args, **kwargs):
309 310 ret = orig.commit(self, *args, **kwargs)
310 311 if ret is None:
311 312 # there was nothing to commit, so we should skip
312 313 # the index fixup logic we'd otherwise do.
313 314 return None
314 315 tid = self.store.git[gitutil.togitnode(ret)].tree.id
315 316 # DANGER! This will flush any writes staged to the
316 317 # index in Git, but we're sidestepping the index in a
317 318 # way that confuses git when we commit. Alas.
318 319 self.store.git.index.read_tree(tid)
319 320 self.store.git.index.write()
320 321 return ret
321 322
322 323 @property
323 324 def _bookmarks(self):
324 325 return gitbmstore(self.store.git)
325 326
326 327 repo.__class__ = gitlocalrepo
327 328 return repo
328 329
329 330
330 331 def _featuresetup(ui, supported):
331 332 # don't die on seeing a repo with the git requirement
332 333 supported |= {b'git'}
333 334
334 335
335 336 def extsetup(ui):
336 337 extensions.wrapfunction(localrepo, b'makestore', _makestore)
337 338 extensions.wrapfunction(localrepo, b'makefilestorage', _makefilestorage)
338 339 # Inject --git flag for `hg init`
339 340 entry = extensions.wrapcommand(commands.table, b'init', init)
340 341 entry[1].extend(
341 342 [(b'', b'git', None, b'set up a git repository instead of hg')]
342 343 )
343 344 localrepo.featuresetupfuncs.add(_featuresetup)
@@ -1,369 +1,369 b''
1 1 from __future__ import absolute_import
2 2
3 3 import contextlib
4 4 import errno
5 5 import os
6 6
7 7 from mercurial.node import sha1nodeconstants
8 8 from mercurial import (
9 9 error,
10 10 extensions,
11 11 match as matchmod,
12 12 pycompat,
13 13 scmutil,
14 14 util,
15 15 )
16 16 from mercurial.interfaces import (
17 17 dirstate as intdirstate,
18 18 util as interfaceutil,
19 19 )
20 20
21 21 from . import gitutil
22 22
23 23 pygit2 = gitutil.get_pygit2()
24 24
25 25
26 26 def readpatternfile(orig, filepath, warn, sourceinfo=False):
27 27 if not (b'info/exclude' in filepath or filepath.endswith(b'.gitignore')):
28 28 return orig(filepath, warn, sourceinfo=sourceinfo)
29 29 result = []
30 30 warnings = []
31 31 with open(filepath, b'rb') as fp:
32 32 for l in fp:
33 33 l = l.strip()
34 34 if not l or l.startswith(b'#'):
35 35 continue
36 36 if l.startswith(b'!'):
37 37 warnings.append(b'unsupported ignore pattern %s' % l)
38 38 continue
39 39 if l.startswith(b'/'):
40 40 result.append(b'rootglob:' + l[1:])
41 41 else:
42 42 result.append(b'relglob:' + l)
43 43 return result, warnings
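# A small sketch of the translation performed above (patterns illustrative):
#   /build    -> rootglob:build   (anchored at the repository root)
#   *.log     -> relglob:*.log    (matched anywhere in the tree)
#   !keep.me  -> skipped, with an "unsupported ignore pattern" warning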
44 44
45 45
46 46 extensions.wrapfunction(matchmod, b'readpatternfile', readpatternfile)
47 47
48 48
49 49 _STATUS_MAP = {}
50 50 if pygit2:
51 51 _STATUS_MAP = {
52 52 pygit2.GIT_STATUS_CONFLICTED: b'm',
53 53 pygit2.GIT_STATUS_CURRENT: b'n',
54 54 pygit2.GIT_STATUS_IGNORED: b'?',
55 55 pygit2.GIT_STATUS_INDEX_DELETED: b'r',
56 56 pygit2.GIT_STATUS_INDEX_MODIFIED: b'n',
57 57 pygit2.GIT_STATUS_INDEX_NEW: b'a',
58 58 pygit2.GIT_STATUS_INDEX_RENAMED: b'a',
59 59 pygit2.GIT_STATUS_INDEX_TYPECHANGE: b'n',
60 60 pygit2.GIT_STATUS_WT_DELETED: b'r',
61 61 pygit2.GIT_STATUS_WT_MODIFIED: b'n',
62 62 pygit2.GIT_STATUS_WT_NEW: b'?',
63 63 pygit2.GIT_STATUS_WT_RENAMED: b'a',
64 64 pygit2.GIT_STATUS_WT_TYPECHANGE: b'n',
65 65 pygit2.GIT_STATUS_WT_UNREADABLE: b'?',
66 66 pygit2.GIT_STATUS_INDEX_MODIFIED | pygit2.GIT_STATUS_WT_MODIFIED: b'm',
67 67 }
68 68
69 69
70 70 @interfaceutil.implementer(intdirstate.idirstate)
71 71 class gitdirstate(object):
72 72 def __init__(self, ui, root, gitrepo):
73 73 self._ui = ui
74 74 self._root = os.path.dirname(root)
75 75 self.git = gitrepo
76 76 self._plchangecallbacks = {}
77 77 # TODO: context.poststatusfixup is bad and uses this attribute
78 78 self._dirty = False
79 79
80 80 def p1(self):
81 81 try:
82 82 return self.git.head.peel().id.raw
83 83 except pygit2.GitError:
84 84 # Typically happens when peeling HEAD fails, as in an
85 85 # empty repository.
86 86 return sha1nodeconstants.nullid
87 87
88 88 def p2(self):
89 89 # TODO: MERGE_HEAD? something like that, right?
90 90 return sha1nodeconstants.nullid
91 91
92 92 def setparents(self, p1, p2=None):
93 93 if p2 is None:
94 94 p2 = sha1nodeconstants.nullid
95 95 assert p2 == sha1nodeconstants.nullid, b'TODO merging support'
96 96 self.git.head.set_target(gitutil.togitnode(p1))
97 97
98 98 @util.propertycache
99 99 def identity(self):
100 100 return util.filestat.frompath(
101 101 os.path.join(self._root, b'.git', b'index')
102 102 )
103 103
104 104 def branch(self):
105 105 return b'default'
106 106
107 107 def parents(self):
108 108 # TODO how on earth do we find p2 if a merge is in flight?
109 109 return self.p1(), sha1nodeconstants.nullid
110 110
111 111 def __iter__(self):
112 112 return (pycompat.fsencode(f.path) for f in self.git.index)
113 113
114 114 def items(self):
115 115 for ie in self.git.index:
116 116 yield ie.path, None # value should be a DirstateItem
117 117
118 118 # py2,3 compat forward
119 119 iteritems = items
120 120
121 121 def __getitem__(self, filename):
122 122 try:
123 123 gs = self.git.status_file(filename)
124 124 except KeyError:
125 125 return b'?'
126 126 return _STATUS_MAP[gs]
127 127
128 128 def __contains__(self, filename):
129 129 try:
130 130 gs = self.git.status_file(filename)
131 131 return _STATUS_MAP[gs] != b'?'
132 132 except KeyError:
133 133 return False
134 134
135 135 def status(self, match, subrepos, ignored, clean, unknown):
136 136 listclean = clean
137 137 # TODO handling of clean files - can we get that from git.status()?
138 138 modified, added, removed, deleted, unknown, ignored, clean = (
139 139 [],
140 140 [],
141 141 [],
142 142 [],
143 143 [],
144 144 [],
145 145 [],
146 146 )
147 147 gstatus = self.git.status()
148 148 for path, status in gstatus.items():
149 149 path = pycompat.fsencode(path)
150 150 if not match(path):
151 151 continue
152 152 if status == pygit2.GIT_STATUS_IGNORED:
153 153 if path.endswith(b'/'):
154 154 continue
155 155 ignored.append(path)
156 156 elif status in (
157 157 pygit2.GIT_STATUS_WT_MODIFIED,
158 158 pygit2.GIT_STATUS_INDEX_MODIFIED,
159 159 pygit2.GIT_STATUS_WT_MODIFIED
160 160 | pygit2.GIT_STATUS_INDEX_MODIFIED,
161 161 ):
162 162 modified.append(path)
163 163 elif status == pygit2.GIT_STATUS_INDEX_NEW:
164 164 added.append(path)
165 165 elif status == pygit2.GIT_STATUS_WT_NEW:
166 166 unknown.append(path)
167 167 elif status == pygit2.GIT_STATUS_WT_DELETED:
168 168 deleted.append(path)
169 169 elif status == pygit2.GIT_STATUS_INDEX_DELETED:
170 170 removed.append(path)
171 171 else:
172 172 raise error.Abort(
173 173 b'unhandled case: status for %r is %r' % (path, status)
174 174 )
175 175
176 176 if listclean:
177 177 observed = set(
178 178 modified + added + removed + deleted + unknown + ignored
179 179 )
180 180 index = self.git.index
181 181 index.read()
182 182 for entry in index:
183 183 path = pycompat.fsencode(entry.path)
184 184 if not match(path):
185 185 continue
186 186 if path in observed:
187 187 continue # already in some other set
188 188 if path.endswith(b'/'):
189 189 continue # directory
190 190 clean.append(path)
191 191
192 192 # TODO are we really always sure of status here?
193 193 return (
194 194 False,
195 195 scmutil.status(
196 196 modified, added, removed, deleted, unknown, ignored, clean
197 197 ),
198 198 )
199 199
200 200 def flagfunc(self, buildfallback):
201 201 # TODO we can do better
202 202 return buildfallback()
203 203
204 204 def getcwd(self):
205 205 # TODO is this a good way to do this?
206 206 return os.path.dirname(
207 207 os.path.dirname(pycompat.fsencode(self.git.path))
208 208 )
209 209
210 210 def normalize(self, path):
211 211 normed = util.normcase(path)
212 212 assert normed == path, b"TODO handling of case folding: %s != %s" % (
213 213 normed,
214 214 path,
215 215 )
216 216 return path
217 217
218 218 @property
219 219 def _checklink(self):
220 220 return util.checklink(os.path.dirname(pycompat.fsencode(self.git.path)))
221 221
222 222 def copies(self):
223 223 # TODO support copies?
224 224 return {}
225 225
226 226 # TODO what the heck is this
227 227 _filecache = set()
228 228
229 229 def pendingparentchange(self):
230 230 # TODO: we need to implement the context manager bits and
231 231 # correctly stage/revert index edits.
232 232 return False
233 233
234 234 def write(self, tr):
235 235 # TODO: call parent change callbacks
236 236
237 237 if tr:
238 238
239 239 def writeinner(category):
240 240 self.git.index.write()
241 241
242 242 tr.addpending(b'gitdirstate', writeinner)
243 243 else:
244 244 self.git.index.write()
245 245
246 246 def pathto(self, f, cwd=None):
247 247 if cwd is None:
248 248 cwd = self.getcwd()
249 249 # TODO core dirstate does something about slashes here
250 250 assert isinstance(f, bytes)
251 251 r = util.pathto(self._root, cwd, f)
252 252 return r
253 253
254 254 def matches(self, match):
255 255 for x in self.git.index:
256 256 p = pycompat.fsencode(x.path)
257 257 if match(p):
258 258 yield p
259 259
260 def set_clean(self, f, parentfiledata=None):
260 def set_clean(self, f, parentfiledata):
261 261 """Mark a file normal and clean."""
262 262 # TODO: for now we just let libgit2 re-stat the file. We can
263 263 # clearly do better.
264 264
265 265 def set_possibly_dirty(self, f):
266 266 """Mark a file normal, but possibly dirty."""
267 267 # TODO: for now we just let libgit2 re-stat the file. We can
268 268 # clearly do better.
269 269
270 270 def walk(self, match, subrepos, unknown, ignored, full=True):
271 271 # TODO: we need to use .status() and not iterate the index,
272 272 # because the index doesn't force a re-walk and so `hg add` of
273 273 # a new file without an intervening call to status will
274 274 # silently do nothing.
275 275 r = {}
276 276 cwd = self.getcwd()
277 277 for path, status in self.git.status().items():
278 278 if path.startswith('.hg/'):
279 279 continue
280 280 path = pycompat.fsencode(path)
281 281 if not match(path):
282 282 continue
283 283 # TODO construct the stat info from the status object?
284 284 try:
285 285 s = os.stat(os.path.join(cwd, path))
286 286 except OSError as e:
287 287 if e.errno != errno.ENOENT:
288 288 raise
289 289 continue
290 290 r[path] = s
291 291 return r
292 292
293 293 def savebackup(self, tr, backupname):
294 294 # TODO: figure out a strategy for saving index backups.
295 295 pass
296 296
297 297 def restorebackup(self, tr, backupname):
298 298 # TODO: figure out a strategy for saving index backups.
299 299 pass
300 300
301 301 def set_tracked(self, f):
302 302 uf = pycompat.fsdecode(f)
303 303 if uf in self.git.index:
304 304 return False
305 305 index = self.git.index
306 306 index.read()
307 307 index.add(uf)
308 308 index.write()
309 309 return True
310 310
311 311 def add(self, f):
312 312 index = self.git.index
313 313 index.read()
314 314 index.add(pycompat.fsdecode(f))
315 315 index.write()
316 316
317 317 def drop(self, f):
318 318 index = self.git.index
319 319 index.read()
320 320 fs = pycompat.fsdecode(f)
321 321 if fs in index:
322 322 index.remove(fs)
323 323 index.write()
324 324
325 325 def set_untracked(self, f):
326 326 index = self.git.index
327 327 index.read()
328 328 fs = pycompat.fsdecode(f)
329 329 if fs in index:
330 330 index.remove(fs)
331 331 index.write()
332 332 return True
333 333 return False
334 334
335 335 def remove(self, f):
336 336 index = self.git.index
337 337 index.read()
338 338 index.remove(pycompat.fsdecode(f))
339 339 index.write()
340 340
341 341 def copied(self, path):
342 342 # TODO: track copies?
343 343 return None
344 344
345 345 def prefetch_parents(self):
346 346 # TODO
347 347 pass
348 348
349 349 def update_file(self, *args, **kwargs):
350 350 # TODO
351 351 pass
352 352
353 353 @contextlib.contextmanager
354 354 def parentchange(self):
355 355 # TODO: track this maybe?
356 356 yield
357 357
358 358 def addparentchangecallback(self, category, callback):
359 359 # TODO: should this be added to the dirstate interface?
360 360 self._plchangecallbacks[category] = callback
361 361
362 362 def clearbackup(self, tr, backupname):
363 363 # TODO
364 364 pass
365 365
366 366 def setbranch(self, branch):
367 367 raise error.Abort(
368 368 b'git repos do not support branches. try using bookmarks'
369 369 )
@@ -1,2678 +1,2690 b''
1 1 # histedit.py - interactive history editing for mercurial
2 2 #
3 3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """interactive history editing
8 8
9 9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 10 is as follows, assuming the following history::
11 11
12 12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 13 | Add delta
14 14 |
15 15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 16 | Add gamma
17 17 |
18 18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 19 | Add beta
20 20 |
21 21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 22 Add alpha
23 23
24 24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 25 file open in your editor::
26 26
27 27 pick c561b4e977df Add beta
28 28 pick 030b686bedc4 Add gamma
29 29 pick 7c2fd3b9020c Add delta
30 30
31 31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 32 #
33 33 # Commits are listed from least to most recent
34 34 #
35 35 # Commands:
36 36 # p, pick = use commit
37 37 # e, edit = use commit, but allow edits before making new commit
38 38 # f, fold = use commit, but combine it with the one above
39 39 # r, roll = like fold, but discard this commit's description and date
40 40 # d, drop = remove commit from history
41 41 # m, mess = edit commit message without changing commit content
42 42 # b, base = checkout changeset and apply further changesets from there
43 43 #
44 44
45 45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
46 46 for each revision in your history. For example, if you had meant to add gamma
47 47 before beta, and then wanted to add delta in the same revision as beta, you
48 48 would reorganize the file to look like this::
49 49
50 50 pick 030b686bedc4 Add gamma
51 51 pick c561b4e977df Add beta
52 52 fold 7c2fd3b9020c Add delta
53 53
54 54 # Edit history between c561b4e977df and 7c2fd3b9020c
55 55 #
56 56 # Commits are listed from least to most recent
57 57 #
58 58 # Commands:
59 59 # p, pick = use commit
60 60 # e, edit = use commit, but allow edits before making new commit
61 61 # f, fold = use commit, but combine it with the one above
62 62 # r, roll = like fold, but discard this commit's description and date
63 63 # d, drop = remove commit from history
64 64 # m, mess = edit commit message without changing commit content
65 65 # b, base = checkout changeset and apply further changesets from there
66 66 #
67 67
68 68 At which point you close the editor and ``histedit`` starts working. When you
69 69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
70 70 those revisions together, offering you a chance to clean up the commit message::
71 71
72 72 Add beta
73 73 ***
74 74 Add delta
75 75
76 76 Edit the commit message to your liking, then close the editor. The date used
77 77 for the commit will be the later of the two commits' dates. For this example,
78 78 let's assume that the commit message was changed to ``Add beta and delta.``
79 79 After histedit has run and had a chance to remove any old or temporary
80 80 revisions it needed, the history looks like this::
81 81
82 82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
83 83 | Add beta and delta.
84 84 |
85 85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
86 86 | Add gamma
87 87 |
88 88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
89 89 Add alpha
90 90
91 91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
92 92 ones) until after it has completed all the editing operations, so it will
93 93 probably perform several strip operations when it's done. For the above example,
94 94 it had to run strip twice. Strip can be slow depending on a variety of factors,
95 95 so you might need to be a little patient. You can choose to keep the original
96 96 revisions by passing the ``--keep`` flag.
97 97
98 98 The ``edit`` operation will drop you back to a command prompt,
99 99 allowing you to edit files freely, or even use ``hg record`` to commit
100 100 some changes as a separate commit. When you're done, any remaining
101 101 uncommitted changes will be committed as well. Then run ``hg
102 102 histedit --continue`` to finish this step. If there are uncommitted
103 103 changes, you'll be prompted for a new commit message, but the default
104 104 commit message will be the original message for the ``edit``-ed
105 105 revision, and the date of the original commit will be preserved.
106 106
107 107 The ``message`` operation will give you a chance to revise a commit
108 108 message without changing the contents. It's a shortcut for doing
109 109 ``edit`` immediately followed by `hg histedit --continue``.
110 110
111 111 If ``histedit`` encounters a conflict when moving a revision (while
112 112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
113 113 ``edit`` with the difference that it won't prompt you for a commit
114 114 message when done. If you decide at this point that you don't like how
115 115 much work it will be to rearrange history, or that you made a mistake,
116 116 you can use ``hg histedit --abort`` to abandon the new changes you
117 117 have made and return to the state before you attempted to edit your
118 118 history.
119 119
120 120 If we clone the histedit-ed example repository above and add four more
121 121 changes, such that we have the following history::
122 122
123 123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
124 124 | Add theta
125 125 |
126 126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
127 127 | Add eta
128 128 |
129 129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
130 130 | Add zeta
131 131 |
132 132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
133 133 | Add epsilon
134 134 |
135 135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
136 136 | Add beta and delta.
137 137 |
138 138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
139 139 | Add gamma
140 140 |
141 141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
142 142 Add alpha
143 143
144 144 If you run ``hg histedit --outgoing`` on the clone then it is the same
145 145 as running ``hg histedit 836302820282``. If you plan to push to a
146 146 repository that Mercurial does not detect to be related to the source
147 147 repo, you can add a ``--force`` option.
148 148
149 149 Config
150 150 ------
151 151
152 152 Histedit rule lines are truncated to 80 characters by default. You
153 153 can customize this behavior by setting a different length in your
154 154 configuration file::
155 155
156 156 [histedit]
157 157 linelen = 120 # truncate rule lines at 120 characters
158 158
159 159 The summary of a change can be customized as well::
160 160
161 161 [histedit]
162 162 summary-template = '{rev} {bookmarks} {desc|firstline}'
163 163
164 164 The customized summary should be kept short enough that rule lines
165 165 will fit in the configured line length. See above if that requires
166 166 customization.
167 167
168 168 ``hg histedit`` attempts to automatically choose an appropriate base
169 169 revision to use. To change which base revision is used, define a
170 170 revset in your configuration file::
171 171
172 172 [histedit]
173 173 defaultrev = only(.) & draft()
174 174
175 175 By default, each edited revision needs to be present in the histedit commands.
176 176 To remove a revision, you need to use the ``drop`` operation. You can configure
177 177 the drop to be implicit for missing commits by adding::
178 178
179 179 [histedit]
180 180 dropmissing = True
181 181
182 182 By default, histedit will close the transaction after each action. For
183 183 performance purposes, you can configure histedit to use a single transaction
184 184 across the entire histedit. WARNING: This setting introduces a significant risk
185 185 of losing the work you've done in a histedit if the histedit aborts
186 186 unexpectedly::
187 187
188 188 [histedit]
189 189 singletransaction = True
190 190
191 191 """
192 192
193 193 from __future__ import absolute_import
194 194
195 195 # chistedit dependencies that are not available everywhere
196 196 try:
197 197 import fcntl
198 198 import termios
199 199 except ImportError:
200 200 fcntl = None
201 201 termios = None
202 202
203 203 import functools
204 204 import os
205 205 import struct
206 206
207 207 from mercurial.i18n import _
208 208 from mercurial.pycompat import (
209 209 getattr,
210 210 open,
211 211 )
212 212 from mercurial.node import (
213 213 bin,
214 214 hex,
215 215 short,
216 216 )
217 217 from mercurial import (
218 218 bundle2,
219 219 cmdutil,
220 220 context,
221 221 copies,
222 222 destutil,
223 223 discovery,
224 224 encoding,
225 225 error,
226 226 exchange,
227 227 extensions,
228 228 hg,
229 229 logcmdutil,
230 230 merge as mergemod,
231 231 mergestate as mergestatemod,
232 232 mergeutil,
233 233 obsolete,
234 234 pycompat,
235 235 registrar,
236 236 repair,
237 237 rewriteutil,
238 238 scmutil,
239 239 state as statemod,
240 240 util,
241 241 )
242 242 from mercurial.utils import (
243 243 dateutil,
244 244 stringutil,
245 245 urlutil,
246 246 )
247 247
248 248 pickle = util.pickle
249 249 cmdtable = {}
250 250 command = registrar.command(cmdtable)
251 251
252 252 configtable = {}
253 253 configitem = registrar.configitem(configtable)
254 254 configitem(
255 255 b'experimental',
256 256 b'histedit.autoverb',
257 257 default=False,
258 258 )
259 259 configitem(
260 260 b'histedit',
261 261 b'defaultrev',
262 262 default=None,
263 263 )
264 264 configitem(
265 265 b'histedit',
266 266 b'dropmissing',
267 267 default=False,
268 268 )
269 269 configitem(
270 270 b'histedit',
271 271 b'linelen',
272 272 default=80,
273 273 )
274 274 configitem(
275 275 b'histedit',
276 276 b'singletransaction',
277 277 default=False,
278 278 )
279 279 configitem(
280 280 b'ui',
281 281 b'interface.histedit',
282 282 default=None,
283 283 )
284 284 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
285 285 # TODO: Teach the text-based histedit interface to respect this config option
286 286 # before we make it non-experimental.
287 287 configitem(
288 288 b'histedit', b'later-commits-first', default=False, experimental=True
289 289 )
290 290
291 291 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
292 292 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
293 293 # be specifying the version(s) of Mercurial they are tested with, or
294 294 # leave the attribute unspecified.
295 295 testedwith = b'ships-with-hg-core'
296 296
297 297 actiontable = {}
298 298 primaryactions = set()
299 299 secondaryactions = set()
300 300 tertiaryactions = set()
301 301 internalactions = set()
302 302
303 303
304 304 def geteditcomment(ui, first, last):
305 305 """construct the editor comment
306 306 The comment includes::
307 307 - an intro
308 308 - sorted primary commands
309 309 - sorted short commands
310 310 - sorted long commands
311 311 - additional hints
312 312
313 313 Commands are only included once.
314 314 """
315 315 intro = _(
316 316 b"""Edit history between %s and %s
317 317
318 318 Commits are listed from least to most recent
319 319
320 320 You can reorder changesets by reordering the lines
321 321
322 322 Commands:
323 323 """
324 324 )
325 325 actions = []
326 326
327 327 def addverb(v):
328 328 a = actiontable[v]
329 329 lines = a.message.split(b"\n")
330 330 if len(a.verbs):
331 331 v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
332 332 actions.append(b" %s = %s" % (v, lines[0]))
333 333 actions.extend([b' %s'] * (len(lines) - 1))
334 334
335 335 for v in (
336 336 sorted(primaryactions)
337 337 + sorted(secondaryactions)
338 338 + sorted(tertiaryactions)
339 339 ):
340 340 addverb(v)
341 341 actions.append(b'')
342 342
343 343 hints = []
344 344 if ui.configbool(b'histedit', b'dropmissing'):
345 345 hints.append(
346 346 b"Deleting a changeset from the list "
347 347 b"will DISCARD it from the edited history!"
348 348 )
349 349
350 350 lines = (intro % (first, last)).split(b'\n') + actions + hints
351 351
352 352 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
353 353
354 354
355 355 class histeditstate(object):
356 356 def __init__(self, repo):
357 357 self.repo = repo
358 358 self.actions = None
359 359 self.keep = None
360 360 self.topmost = None
361 361 self.parentctxnode = None
362 362 self.lock = None
363 363 self.wlock = None
364 364 self.backupfile = None
365 365 self.stateobj = statemod.cmdstate(repo, b'histedit-state')
366 366 self.replacements = []
367 367
368 368 def read(self):
369 369 """Load histedit state from disk and set fields appropriately."""
370 370 if not self.stateobj.exists():
371 371 cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
372 372
373 373 data = self._read()
374 374
375 375 self.parentctxnode = data[b'parentctxnode']
376 376 actions = parserules(data[b'rules'], self)
377 377 self.actions = actions
378 378 self.keep = data[b'keep']
379 379 self.topmost = data[b'topmost']
380 380 self.replacements = data[b'replacements']
381 381 self.backupfile = data[b'backupfile']
382 382
383 383 def _read(self):
384 384 fp = self.repo.vfs.read(b'histedit-state')
385 385 if fp.startswith(b'v1\n'):
386 386 data = self._load()
387 387 parentctxnode, rules, keep, topmost, replacements, backupfile = data
388 388 else:
389 389 data = pickle.loads(fp)
390 390 parentctxnode, rules, keep, topmost, replacements = data
391 391 backupfile = None
392 392 rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
393 393
394 394 return {
395 395 b'parentctxnode': parentctxnode,
396 396 b"rules": rules,
397 397 b"keep": keep,
398 398 b"topmost": topmost,
399 399 b"replacements": replacements,
400 400 b"backupfile": backupfile,
401 401 }
402 402
403 403 def write(self, tr=None):
404 404 if tr:
405 405 tr.addfilegenerator(
406 406 b'histedit-state',
407 407 (b'histedit-state',),
408 408 self._write,
409 409 location=b'plain',
410 410 )
411 411 else:
412 412 with self.repo.vfs(b"histedit-state", b"w") as f:
413 413 self._write(f)
414 414
415 415 def _write(self, fp):
416 416 fp.write(b'v1\n')
417 417 fp.write(b'%s\n' % hex(self.parentctxnode))
418 418 fp.write(b'%s\n' % hex(self.topmost))
419 419 fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
420 420 fp.write(b'%d\n' % len(self.actions))
421 421 for action in self.actions:
422 422 fp.write(b'%s\n' % action.tostate())
423 423 fp.write(b'%d\n' % len(self.replacements))
424 424 for replacement in self.replacements:
425 425 fp.write(
426 426 b'%s%s\n'
427 427 % (
428 428 hex(replacement[0]),
429 429 b''.join(hex(r) for r in replacement[1]),
430 430 )
431 431 )
432 432 backupfile = self.backupfile
433 433 if not backupfile:
434 434 backupfile = b''
435 435 fp.write(b'%s\n' % backupfile)
436 436
437 437 def _load(self):
438 438 fp = self.repo.vfs(b'histedit-state', b'r')
439 439 lines = [l[:-1] for l in fp.readlines()]
440 440
441 441 index = 0
442 442 lines[index] # version number
443 443 index += 1
444 444
445 445 parentctxnode = bin(lines[index])
446 446 index += 1
447 447
448 448 topmost = bin(lines[index])
449 449 index += 1
450 450
451 451 keep = lines[index] == b'True'
452 452 index += 1
453 453
454 454 # Rules
455 455 rules = []
456 456 rulelen = int(lines[index])
457 457 index += 1
458 458 for i in pycompat.xrange(rulelen):
459 459 ruleaction = lines[index]
460 460 index += 1
461 461 rule = lines[index]
462 462 index += 1
463 463 rules.append((ruleaction, rule))
464 464
465 465 # Replacements
466 466 replacements = []
467 467 replacementlen = int(lines[index])
468 468 index += 1
469 469 for i in pycompat.xrange(replacementlen):
470 470 replacement = lines[index]
471 471 original = bin(replacement[:40])
472 472 succ = [
473 473 bin(replacement[i : i + 40])
474 474 for i in range(40, len(replacement), 40)
475 475 ]
476 476 replacements.append((original, succ))
477 477 index += 1
478 478
479 479 backupfile = lines[index]
480 480 index += 1
481 481
482 482 fp.close()
483 483
484 484 return parentctxnode, rules, keep, topmost, replacements, backupfile
485 485
486 486 def clear(self):
487 487 if self.inprogress():
488 488 self.repo.vfs.unlink(b'histedit-state')
489 489
490 490 def inprogress(self):
491 491 return self.repo.vfs.exists(b'histedit-state')
492 492
493 493
494 494 class histeditaction(object):
495 495 def __init__(self, state, node):
496 496 self.state = state
497 497 self.repo = state.repo
498 498 self.node = node
499 499
500 500 @classmethod
501 501 def fromrule(cls, state, rule):
502 502 """Parses the given rule, returning an instance of the histeditaction."""
503 503 ruleid = rule.strip().split(b' ', 1)[0]
504 504 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
505 505 # Check for validation of rule ids and get the rulehash
506 506 try:
507 507 rev = bin(ruleid)
508 508 except TypeError:
509 509 try:
510 510 _ctx = scmutil.revsingle(state.repo, ruleid)
511 511 rulehash = _ctx.hex()
512 512 rev = bin(rulehash)
513 513 except error.RepoLookupError:
514 514 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
515 515 return cls(state, rev)
516 516
517 517 def verify(self, prev, expected, seen):
518 518 """Verifies semantic correctness of the rule"""
519 519 repo = self.repo
520 520 ha = hex(self.node)
521 521 self.node = scmutil.resolvehexnodeidprefix(repo, ha)
522 522 if self.node is None:
523 523 raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
524 524 self._verifynodeconstraints(prev, expected, seen)
525 525
526 526 def _verifynodeconstraints(self, prev, expected, seen):
526 526 # by default a command needs a node in the edited list
528 528 if self.node not in expected:
529 529 raise error.ParseError(
530 530 _(b'%s "%s" changeset was not a candidate')
531 531 % (self.verb, short(self.node)),
532 532 hint=_(b'only use listed changesets'),
533 533 )
534 534 # and only one command per node
535 535 if self.node in seen:
536 536 raise error.ParseError(
537 537 _(b'duplicated command for changeset %s') % short(self.node)
538 538 )
539 539
540 540 def torule(self):
541 541 """build a histedit rule line for an action
542 542
543 543 by default lines are in the form:
544 544 <verb> <hash> <summary>
545 545 """
546 546 ctx = self.repo[self.node]
547 547 ui = self.repo.ui
548 548 # We don't want color codes in the commit message template, so
549 549 # disable the label() template function while we render it.
550 550 with ui.configoverride(
551 551 {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
552 552 ):
553 553 summary = cmdutil.rendertemplate(
554 554 ctx, ui.config(b'histedit', b'summary-template')
555 555 )
556 556 # Handle the fact that `''.splitlines() => []`
557 557 summary = summary.splitlines()[0] if summary else b''
558 558 line = b'%s %s %s' % (self.verb, ctx, summary)
559 559 # trim to 75 columns by default so it's not stupidly wide in my editor
560 560 # (the 5 more are left for verb)
561 561 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
562 562 maxlen = max(maxlen, 22) # avoid truncating hash
563 563 return stringutil.ellipsis(line, maxlen)
564 564
565 565 def tostate(self):
566 566 """Print an action in format used by histedit state files
567 567 (the first line is a verb, the remainder is the second)
568 568 """
569 569 return b"%s\n%s" % (self.verb, hex(self.node))
570 570
571 571 def run(self):
572 572 """Runs the action. The default behavior is simply apply the action's
573 573 rulectx onto the current parentctx."""
574 574 self.applychange()
575 575 self.continuedirty()
576 576 return self.continueclean()
577 577
578 578 def applychange(self):
579 579 """Applies the changes from this action's rulectx onto the current
580 580 parentctx, but does not commit them."""
581 581 repo = self.repo
582 582 rulectx = repo[self.node]
583 583 with repo.ui.silent():
584 584 hg.update(repo, self.state.parentctxnode, quietempty=True)
585 585 stats = applychanges(repo.ui, repo, rulectx, {})
586 586 repo.dirstate.setbranch(rulectx.branch())
587 587 if stats.unresolvedcount:
588 588 raise error.InterventionRequired(
589 589 _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
590 590 hint=_(b'hg histedit --continue to resume'),
591 591 )
592 592
593 593 def continuedirty(self):
594 594 """Continues the action when changes have been applied to the working
595 595 copy. The default behavior is to commit the dirty changes."""
596 596 repo = self.repo
597 597 rulectx = repo[self.node]
598 598
599 599 editor = self.commiteditor()
600 600 commit = commitfuncfor(repo, rulectx)
601 601 if repo.ui.configbool(b'rewrite', b'update-timestamp'):
602 602 date = dateutil.makedate()
603 603 else:
604 604 date = rulectx.date()
605 605 commit(
606 606 text=rulectx.description(),
607 607 user=rulectx.user(),
608 608 date=date,
609 609 extra=rulectx.extra(),
610 610 editor=editor,
611 611 )
612 612
613 613 def commiteditor(self):
614 614 """The editor to be used to edit the commit message."""
615 615 return False
616 616
617 617 def continueclean(self):
618 618 """Continues the action when the working copy is clean. The default
619 619 behavior is to accept the current commit as the new version of the
620 620 rulectx."""
621 621 ctx = self.repo[b'.']
622 622 if ctx.node() == self.state.parentctxnode:
623 623 self.repo.ui.warn(
624 624 _(b'%s: skipping changeset (no changes)\n') % short(self.node)
625 625 )
626 626 return ctx, [(self.node, tuple())]
627 627 if ctx.node() == self.node:
628 628 # Nothing changed
629 629 return ctx, []
630 630 return ctx, [(self.node, (ctx.node(),))]
631 631
632 632
633 633 def commitfuncfor(repo, src):
634 634 """Build a commit function for the replacement of <src>
635 635
636 636 This function ensures we apply the same treatment to all changesets.
637 637
638 638 - Add a 'histedit_source' entry in extra.
639 639
640 640 Note that fold has its own separate logic because its handling is a bit
641 641 different and not easily factored out of the fold method.
642 642 """
643 643 phasemin = src.phase()
644 644
645 645 def commitfunc(**kwargs):
646 646 overrides = {(b'phases', b'new-commit'): phasemin}
647 647 with repo.ui.configoverride(overrides, b'histedit'):
648 648 extra = kwargs.get('extra', {}).copy()
649 649 extra[b'histedit_source'] = src.hex()
650 650 kwargs['extra'] = extra
651 651 return repo.commit(**kwargs)
652 652
653 653 return commitfunc
654 654
655 655
656 656 def applychanges(ui, repo, ctx, opts):
657 657 """Merge changeset from ctx (only) in the current working directory"""
658 658 if ctx.p1().node() == repo.dirstate.p1():
659 659 # edits are "in place"; we do not need to do any merging,
660 660 # just apply the changes onto the parent for editing
661 661 with ui.silent():
662 662 cmdutil.revert(ui, repo, ctx, all=True)
663 663 stats = mergemod.updateresult(0, 0, 0, 0)
664 664 else:
665 665 try:
666 666 # ui.forcemerge is an internal variable, do not document
667 667 repo.ui.setconfig(
668 668 b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
669 669 )
670 stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
670 stats = mergemod.graft(
671 repo,
672 ctx,
673 labels=[
674 b'already edited',
675 b'current change',
676 b'parent of current change',
677 ],
678 )
671 679 finally:
672 680 repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
673 681 return stats
674 682
675 683
676 684 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
677 685 """collapse the set of revisions from first to last as new one.
678 686
679 687 Expected commit options are:
680 688 - message
681 689 - date
682 690 - username
683 691 Commit message is edited in all cases.
684 692
685 693 This function works in memory."""
686 694 ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
687 695 if not ctxs:
688 696 return None
689 697 for c in ctxs:
690 698 if not c.mutable():
691 699 raise error.ParseError(
692 700 _(b"cannot fold into public change %s") % short(c.node())
693 701 )
694 702 base = firstctx.p1()
695 703
696 704 # commit a new version of the old changeset, including the update
697 705 # collect all files which might be affected
698 706 files = set()
699 707 for ctx in ctxs:
700 708 files.update(ctx.files())
701 709
702 710 # Recompute copies (avoid recording a -> b -> a)
703 711 copied = copies.pathcopies(base, lastctx)
704 712
705 713 # prune files which were reverted by the updates
706 714 files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
707 715 # commit version of these files as defined by head
708 716 headmf = lastctx.manifest()
709 717
710 718 def filectxfn(repo, ctx, path):
711 719 if path in headmf:
712 720 fctx = lastctx[path]
713 721 flags = fctx.flags()
714 722 mctx = context.memfilectx(
715 723 repo,
716 724 ctx,
717 725 fctx.path(),
718 726 fctx.data(),
719 727 islink=b'l' in flags,
720 728 isexec=b'x' in flags,
721 729 copysource=copied.get(path),
722 730 )
723 731 return mctx
724 732 return None
725 733
726 734 if commitopts.get(b'message'):
727 735 message = commitopts[b'message']
728 736 else:
729 737 message = firstctx.description()
730 738 user = commitopts.get(b'user')
731 739 date = commitopts.get(b'date')
732 740 extra = commitopts.get(b'extra')
733 741
734 742 parents = (firstctx.p1().node(), firstctx.p2().node())
735 743 editor = None
736 744 if not skipprompt:
737 745 editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
738 746 new = context.memctx(
739 747 repo,
740 748 parents=parents,
741 749 text=message,
742 750 files=files,
743 751 filectxfn=filectxfn,
744 752 user=user,
745 753 date=date,
746 754 extra=extra,
747 755 editor=editor,
748 756 )
749 757 return repo.commitctx(new)
750 758
751 759
752 760 def _isdirtywc(repo):
753 761 return repo[None].dirty(missing=True)
754 762
755 763
756 764 def abortdirty():
757 765 raise error.StateError(
758 766 _(b'working copy has pending changes'),
759 767 hint=_(
760 768 b'amend, commit, or revert them and run histedit '
761 769 b'--continue, or abort with histedit --abort'
762 770 ),
763 771 )
764 772
765 773
766 774 def action(verbs, message, priority=False, internal=False):
767 775 def wrap(cls):
768 776 assert not priority or not internal
769 777 verb = verbs[0]
770 778 if priority:
771 779 primaryactions.add(verb)
772 780 elif internal:
773 781 internalactions.add(verb)
774 782 elif len(verbs) > 1:
775 783 secondaryactions.add(verb)
776 784 else:
777 785 tertiaryactions.add(verb)
778 786
779 787 cls.verb = verb
780 788 cls.verbs = verbs
781 789 cls.message = message
782 790 for verb in verbs:
783 791 actiontable[verb] = cls
784 792 return cls
785 793
786 794 return wrap
787 795
788 796
789 797 @action([b'pick', b'p'], _(b'use commit'), priority=True)
790 798 class pick(histeditaction):
791 799 def run(self):
792 800 rulectx = self.repo[self.node]
793 801 if rulectx.p1().node() == self.state.parentctxnode:
794 802 self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
795 803 return rulectx, []
796 804
797 805 return super(pick, self).run()
798 806
799 807
800 808 @action(
801 809 [b'edit', b'e'],
802 810 _(b'use commit, but allow edits before making new commit'),
803 811 priority=True,
804 812 )
805 813 class edit(histeditaction):
806 814 def run(self):
807 815 repo = self.repo
808 816 rulectx = repo[self.node]
809 817 hg.update(repo, self.state.parentctxnode, quietempty=True)
810 818 applychanges(repo.ui, repo, rulectx, {})
811 819 hint = _(b'to edit %s, `hg histedit --continue` after making changes')
812 820 raise error.InterventionRequired(
813 821 _(b'Editing (%s), commit as needed now to split the change')
814 822 % short(self.node),
815 823 hint=hint % short(self.node),
816 824 )
817 825
818 826 def commiteditor(self):
819 827 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
820 828
821 829
822 830 @action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
823 831 class fold(histeditaction):
824 832 def verify(self, prev, expected, seen):
825 833 """Verifies semantic correctness of the fold rule"""
826 834 super(fold, self).verify(prev, expected, seen)
827 835 repo = self.repo
828 836 if not prev:
829 837 c = repo[self.node].p1()
830 838 elif prev.verb not in (b'pick', b'base'):
831 839 return
832 840 else:
833 841 c = repo[prev.node]
834 842 if not c.mutable():
835 843 raise error.ParseError(
836 844 _(b"cannot fold into public change %s") % short(c.node())
837 845 )
838 846
839 847 def continuedirty(self):
840 848 repo = self.repo
841 849 rulectx = repo[self.node]
842 850
843 851 commit = commitfuncfor(repo, rulectx)
844 852 commit(
845 853 text=b'fold-temp-revision %s' % short(self.node),
846 854 user=rulectx.user(),
847 855 date=rulectx.date(),
848 856 extra=rulectx.extra(),
849 857 )
850 858
851 859 def continueclean(self):
852 860 repo = self.repo
853 861 ctx = repo[b'.']
854 862 rulectx = repo[self.node]
855 863 parentctxnode = self.state.parentctxnode
856 864 if ctx.node() == parentctxnode:
857 865 repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
858 866 return ctx, [(self.node, (parentctxnode,))]
859 867
860 868 parentctx = repo[parentctxnode]
861 869 newcommits = {
862 870 c.node()
863 871 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
864 872 }
865 873 if not newcommits:
866 874 repo.ui.warn(
867 875 _(
868 876 b'%s: cannot fold - working copy is not a '
869 877 b'descendant of previous commit %s\n'
870 878 )
871 879 % (short(self.node), short(parentctxnode))
872 880 )
873 881 return ctx, [(self.node, (ctx.node(),))]
874 882
875 883 middlecommits = newcommits.copy()
876 884 middlecommits.discard(ctx.node())
877 885
878 886 return self.finishfold(
879 887 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
880 888 )
881 889
882 890 def skipprompt(self):
883 891 """Returns true if the rule should skip the message editor.
884 892
885 893 For example, 'fold' wants to show an editor, but 'rollup'
886 894 doesn't want to.
887 895 """
888 896 return False
889 897
890 898 def mergedescs(self):
891 899 """Returns true if the rule should merge messages of multiple changes.
892 900
893 901 This exists mainly so that 'rollup' rules can be a subclass of
894 902 'fold'.
895 903 """
896 904 return True
897 905
898 906 def firstdate(self):
899 907 """Returns true if the rule should preserve the date of the first
900 908 change.
901 909
902 910 This exists mainly so that 'rollup' rules can be a subclass of
903 911 'fold'.
904 912 """
905 913 return False
906 914
907 915 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
908 916 mergemod.update(ctx.p1())
909 917 ### prepare new commit data
910 918 commitopts = {}
911 919 commitopts[b'user'] = ctx.user()
912 920 # commit message
913 921 if not self.mergedescs():
914 922 newmessage = ctx.description()
915 923 else:
916 924 newmessage = (
917 925 b'\n***\n'.join(
918 926 [ctx.description()]
919 927 + [repo[r].description() for r in internalchanges]
920 928 + [oldctx.description()]
921 929 )
922 930 + b'\n'
923 931 )
924 932 commitopts[b'message'] = newmessage
925 933 # date
926 934 if self.firstdate():
927 935 commitopts[b'date'] = ctx.date()
928 936 else:
929 937 commitopts[b'date'] = max(ctx.date(), oldctx.date())
930 938 # if date is to be updated to current
931 939 if ui.configbool(b'rewrite', b'update-timestamp'):
932 940 commitopts[b'date'] = dateutil.makedate()
933 941
934 942 extra = ctx.extra().copy()
935 943 # histedit_source
936 944 # note: ctx is likely a temporary commit, but that's the best we can do
937 945 # here. This is sufficient to solve issue3681 anyway.
938 946 extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
939 947 commitopts[b'extra'] = extra
940 948 phasemin = max(ctx.phase(), oldctx.phase())
941 949 overrides = {(b'phases', b'new-commit'): phasemin}
942 950 with repo.ui.configoverride(overrides, b'histedit'):
943 951 n = collapse(
944 952 repo,
945 953 ctx,
946 954 repo[newnode],
947 955 commitopts,
948 956 skipprompt=self.skipprompt(),
949 957 )
950 958 if n is None:
951 959 return ctx, []
952 960 mergemod.update(repo[n])
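# each (old, (new, ...)) pair below records that 'old' is superseded by
# 'new'; processreplacement() and scmutil.cleanupnodes() later turn these
# into obsolescence markers or strip instructions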
953 961 replacements = [
954 962 (oldctx.node(), (newnode,)),
955 963 (ctx.node(), (n,)),
956 964 (newnode, (n,)),
957 965 ]
958 966 for ich in internalchanges:
959 967 replacements.append((ich, (n,)))
960 968 return repo[n], replacements
961 969
962 970
963 971 @action(
964 972 [b'base', b'b'],
965 973 _(b'checkout changeset and apply further changesets from there'),
966 974 )
967 975 class base(histeditaction):
968 976 def run(self):
969 977 if self.repo[b'.'].node() != self.node:
970 978 mergemod.clean_update(self.repo[self.node])
971 979 return self.continueclean()
972 980
973 981 def continuedirty(self):
974 982 abortdirty()
975 983
976 984 def continueclean(self):
977 985 basectx = self.repo[b'.']
978 986 return basectx, []
979 987
980 988 def _verifynodeconstraints(self, prev, expected, seen):
981 989 # base can only be used with a node not in the edited set
982 990 if self.node in expected:
983 991 msg = _(b'%s "%s" changeset was an edited list candidate')
984 992 raise error.ParseError(
985 993 msg % (self.verb, short(self.node)),
986 994 hint=_(b'base must only use unlisted changesets'),
987 995 )
988 996
989 997
990 998 @action(
991 999 [b'_multifold'],
992 1000 _(
993 1001 """fold subclass used for when multiple folds happen in a row
994 1002
995 1003 We only want to fire the editor for the folded message once when
996 1004 (say) four changes are folded down into a single change. This is
997 1005 similar to rollup, but we should preserve both messages so that
998 1006 when the last fold operation runs we can show the user all the
999 1007 commit messages in their editor.
1000 1008 """
1001 1009 ),
1002 1010 internal=True,
1003 1011 )
1004 1012 class _multifold(fold):
1005 1013 def skipprompt(self):
1006 1014 return True
1007 1015
1008 1016
1009 1017 @action(
1010 1018 [b"roll", b"r"],
1011 1019 _(b"like fold, but discard this commit's description and date"),
1012 1020 )
1013 1021 class rollup(fold):
1014 1022 def mergedescs(self):
1015 1023 return False
1016 1024
1017 1025 def skipprompt(self):
1018 1026 return True
1019 1027
1020 1028 def firstdate(self):
1021 1029 return True
1022 1030
1023 1031
1024 1032 @action([b"drop", b"d"], _(b'remove commit from history'))
1025 1033 class drop(histeditaction):
1026 1034 def run(self):
1027 1035 parentctx = self.repo[self.state.parentctxnode]
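# an empty successor tuple records self.node as dropped with no replacement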
1028 1036 return parentctx, [(self.node, tuple())]
1029 1037
1030 1038
1031 1039 @action(
1032 1040 [b"mess", b"m"],
1033 1041 _(b'edit commit message without changing commit content'),
1034 1042 priority=True,
1035 1043 )
1036 1044 class message(histeditaction):
1037 1045 def commiteditor(self):
1038 1046 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1039 1047
1040 1048
1041 1049 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
1042 1050 """utility function to find the first outgoing changeset
1043 1051
1044 1052 Used by initialization code"""
1045 1053 if opts is None:
1046 1054 opts = {}
1047 1055 path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)
1048 1056 dest = path.pushloc or path.loc
1049 1057
1050 1058 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
1051 1059
1052 1060 revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
1053 1061 other = hg.peer(repo, opts, dest)
1054 1062
1055 1063 if revs:
1056 1064 revs = [repo.lookup(rev) for rev in revs]
1057 1065
1058 1066 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
1059 1067 if not outgoing.missing:
1060 1068 raise error.StateError(_(b'no outgoing ancestors'))
1061 1069 roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
1062 1070 if len(roots) > 1:
1063 1071 msg = _(b'there are ambiguous outgoing revisions')
1064 1072 hint = _(b"see 'hg help histedit' for more detail")
1065 1073 raise error.StateError(msg, hint=hint)
1066 1074 return repo[roots[0]].node()
1067 1075
1068 1076
1069 1077 # Curses Support
1070 1078 try:
1071 1079 import curses
1072 1080 except ImportError:
1073 1081 curses = None
1074 1082
1075 1083 KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
1076 1084 ACTION_LABELS = {
1077 1085 b'fold': b'^fold',
1078 1086 b'roll': b'^roll',
1079 1087 }
1080 1088
1081 1089 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
1082 1090 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
1083 1091 COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11
1084 1092
1085 1093 E_QUIT, E_HISTEDIT = 1, 2
1086 1094 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
1087 1095 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
1088 1096
1089 1097 KEYTABLE = {
1090 1098 b'global': {
1091 1099 b'h': b'next-action',
1092 1100 b'KEY_RIGHT': b'next-action',
1093 1101 b'l': b'prev-action',
1094 1102 b'KEY_LEFT': b'prev-action',
1095 1103 b'q': b'quit',
1096 1104 b'c': b'histedit',
1097 1105 b'C': b'histedit',
1098 1106 b'v': b'showpatch',
1099 1107 b'?': b'help',
1100 1108 },
1101 1109 MODE_RULES: {
1102 1110 b'd': b'action-drop',
1103 1111 b'e': b'action-edit',
1104 1112 b'f': b'action-fold',
1105 1113 b'm': b'action-mess',
1106 1114 b'p': b'action-pick',
1107 1115 b'r': b'action-roll',
1108 1116 b' ': b'select',
1109 1117 b'j': b'down',
1110 1118 b'k': b'up',
1111 1119 b'KEY_DOWN': b'down',
1112 1120 b'KEY_UP': b'up',
1113 1121 b'J': b'move-down',
1114 1122 b'K': b'move-up',
1115 1123 b'KEY_NPAGE': b'move-down',
1116 1124 b'KEY_PPAGE': b'move-up',
1117 1125 b'0': b'goto', # Used for 0..9
1118 1126 },
1119 1127 MODE_PATCH: {
1120 1128 b' ': b'page-down',
1121 1129 b'KEY_NPAGE': b'page-down',
1122 1130 b'KEY_PPAGE': b'page-up',
1123 1131 b'j': b'line-down',
1124 1132 b'k': b'line-up',
1125 1133 b'KEY_DOWN': b'line-down',
1126 1134 b'KEY_UP': b'line-up',
1127 1135 b'J': b'down',
1128 1136 b'K': b'up',
1129 1137 },
1130 1138 MODE_HELP: {},
1131 1139 }
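# key lookups fall back from the current mode's table to b'global' (see
# _chistedit_state.event), so the mode tables only list their overrides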
1132 1140
1133 1141
1134 1142 def screen_size():
1135 1143 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b'    '))  # 4-byte buffer for two shorts
1136 1144
1137 1145
1138 1146 class histeditrule(object):
1139 1147 def __init__(self, ui, ctx, pos, action=b'pick'):
1140 1148 self.ui = ui
1141 1149 self.ctx = ctx
1142 1150 self.action = action
1143 1151 self.origpos = pos
1144 1152 self.pos = pos
1145 1153 self.conflicts = []
1146 1154
1147 1155 def __bytes__(self):
1148 1156 # Example display of several histeditrules:
1149 1157 #
1150 1158 # #10 pick 316392:06a16c25c053 add option to skip tests
1151 1159 # #11 ^roll 316393:71313c964cc5 <RED>oops a fixup commit</RED>
1152 1160 # #12 pick 316394:ab31f3973b0d include mfbt for mozilla-config.h
1153 1161 # #13 ^fold 316395:14ce5803f4c3 fix warnings
1154 1162 #
1155 1163 # The carets point to the changeset being folded into ("roll this
1156 1164 # changeset into the changeset above").
1157 1165 return b'%s%s' % (self.prefix, self.desc)
1158 1166
1159 1167 __str__ = encoding.strmethod(__bytes__)
1160 1168
1161 1169 @property
1162 1170 def prefix(self):
1163 1171 # Some actions ('fold' and 'roll') combine a patch with a
1164 1172 # previous one. Add a marker showing which patch they apply
1165 1173 # to.
1166 1174 action = ACTION_LABELS.get(self.action, self.action)
1167 1175
1168 1176 h = self.ctx.hex()[0:12]
1169 1177 r = self.ctx.rev()
1170 1178
1171 1179 return b"#%s %s %d:%s " % (
1172 1180 (b'%d' % self.origpos).ljust(2),
1173 1181 action.ljust(6),
1174 1182 r,
1175 1183 h,
1176 1184 )
1177 1185
1178 1186 @util.propertycache
1179 1187 def desc(self):
1180 1188 summary = cmdutil.rendertemplate(
1181 1189 self.ctx, self.ui.config(b'histedit', b'summary-template')
1182 1190 )
1183 1191 if summary:
1184 1192 return summary
1185 1193 # This is split off from the prefix property so that we can
1186 1194 # separately make the description for 'roll' red (since it
1187 1195 # will get discarded).
1188 1196 return self.ctx.description().splitlines()[0].strip()
1189 1197
1190 1198 def checkconflicts(self, other):
1191 1199 if other.pos > self.pos and other.origpos <= self.origpos:
1192 1200 if set(other.ctx.files()) & set(self.ctx.files()) != set():
1193 1201 self.conflicts.append(other)
1194 1202 return self.conflicts
1195 1203
1196 1204 if other in self.conflicts:
1197 1205 self.conflicts.remove(other)
1198 1206 return self.conflicts
1199 1207
1200 1208
1201 1209 def makecommands(rules):
1202 1210 """Returns a list of commands consumable by histedit --commands based on
1203 1211 our list of rules"""
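# e.g. a pick rule for changeset 0a9639fcda9d becomes the line
# b'pick 0a9639fcda9d\n' (a changectx formats as its short hex id here)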
1204 1212 commands = []
1205 1213 for rule in rules:
1206 1214 commands.append(b'%s %s\n' % (rule.action, rule.ctx))
1207 1215 return commands
1208 1216
1209 1217
1210 1218 def addln(win, y, x, line, color=None):
1211 1219 """Add a line to the given window left padding but 100% filled with
1212 1220 whitespace characters, so that the color appears on the whole line"""
1213 1221 maxy, maxx = win.getmaxyx()
1214 1222 length = maxx - 1 - x
1215 1223 line = bytes(line).ljust(length)[:length]
1216 1224 if y < 0:
1217 1225 y = maxy + y
1218 1226 if x < 0:
1219 1227 x = maxx + x
1220 1228 if color:
1221 1229 win.addstr(y, x, line, color)
1222 1230 else:
1223 1231 win.addstr(y, x, line)
1224 1232
1225 1233
1226 1234 def _trunc_head(line, n):
1227 1235 if len(line) <= n:
1228 1236 return line
1229 1237 return b'> ' + line[-(n - 2) :]
1230 1238
1231 1239
1232 1240 def _trunc_tail(line, n):
1233 1241 if len(line) <= n:
1234 1242 return line
1235 1243 return line[: n - 2] + b' >'
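# worked examples: _trunc_head(b'abcdefgh', 6) -> b'> efgh' and
# _trunc_tail(b'abcdefgh', 6) -> b'abcd >'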
1236 1244
1237 1245
1238 1246 class _chistedit_state(object):
1239 1247 def __init__(
1240 1248 self,
1241 1249 repo,
1242 1250 rules,
1243 1251 stdscr,
1244 1252 ):
1245 1253 self.repo = repo
1246 1254 self.rules = rules
1247 1255 self.stdscr = stdscr
1248 1256 self.later_on_top = repo.ui.configbool(
1249 1257 b'histedit', b'later-commits-first'
1250 1258 )
1251 1259 # The current item in display order, initialized to point to the top
1252 1260 # of the screen.
1253 1261 self.pos = 0
1254 1262 self.selected = None
1255 1263 self.mode = (MODE_INIT, MODE_INIT)
1256 1264 self.page_height = None
1257 1265 self.modes = {
1258 1266 MODE_RULES: {
1259 1267 b'line_offset': 0,
1260 1268 },
1261 1269 MODE_PATCH: {
1262 1270 b'line_offset': 0,
1263 1271 },
1264 1272 }
1265 1273
1266 1274 def render_commit(self, win):
1267 1275 """Renders the commit window that shows the log of the current selected
1268 1276 commit"""
1269 1277 rule = self.rules[self.display_pos_to_rule_pos(self.pos)]
1270 1278
1271 1279 ctx = rule.ctx
1272 1280 win.box()
1273 1281
1274 1282 maxy, maxx = win.getmaxyx()
1275 1283 length = maxx - 3
1276 1284
1277 1285 line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
1278 1286 win.addstr(1, 1, line[:length])
1279 1287
1280 1288 line = b"user: %s" % ctx.user()
1281 1289 win.addstr(2, 1, line[:length])
1282 1290
1283 1291 bms = self.repo.nodebookmarks(ctx.node())
1284 1292 line = b"bookmark: %s" % b' '.join(bms)
1285 1293 win.addstr(3, 1, line[:length])
1286 1294
1287 1295 line = b"summary: %s" % (ctx.description().splitlines()[0])
1288 1296 win.addstr(4, 1, line[:length])
1289 1297
1290 1298 line = b"files: "
1291 1299 win.addstr(5, 1, line)
1292 1300 fnx = 1 + len(line)
1293 1301 fnmaxx = length - fnx + 1
1294 1302 y = 5
1295 1303 fnmaxn = maxy - (1 + y) - 1
1296 1304 files = ctx.files()
1297 1305 for i, line1 in enumerate(files):
1298 1306 if len(files) > fnmaxn and i == fnmaxn - 1:
1299 1307 win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
1300 1308 y = y + 1
1301 1309 break
1302 1310 win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
1303 1311 y = y + 1
1304 1312
1305 1313 conflicts = rule.conflicts
1306 1314 if len(conflicts) > 0:
1307 1315 conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
1308 1316 conflictstr = b"changed files overlap with %s" % conflictstr
1309 1317 else:
1310 1318 conflictstr = b'no overlap'
1311 1319
1312 1320 win.addstr(y, 1, conflictstr[:length])
1313 1321 win.noutrefresh()
1314 1322
1315 1323 def helplines(self):
1316 1324 if self.mode[0] == MODE_PATCH:
1317 1325 help = b"""\
1318 1326 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
1319 1327 pgup: prev page, space/pgdn: next page, c: commit, q: abort
1320 1328 """
1321 1329 else:
1322 1330 help = b"""\
1323 1331 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
1324 1332 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
1325 1333 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
1326 1334 """
1335 if self.later_on_top:
1336 help += b"Newer commits are shown above older commits.\n"
1337 else:
1338 help += b"Older commits are shown above newer commits.\n"
1327 1339 return help.splitlines()
1328 1340
1329 1341 def render_help(self, win):
1330 1342 maxy, maxx = win.getmaxyx()
1331 1343 for y, line in enumerate(self.helplines()):
1332 1344 if y >= maxy:
1333 1345 break
1334 1346 addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
1335 1347 win.noutrefresh()
1336 1348
1337 1349 def layout(self):
1338 1350 maxy, maxx = self.stdscr.getmaxyx()
1339 1351 helplen = len(self.helplines())
1340 1352 mainlen = maxy - helplen - 12
1341 1353 if mainlen < 1:
1342 1354 raise error.Abort(
1343 1355 _(b"terminal dimensions %d by %d too small for curses histedit")
1344 1356 % (maxy, maxx),
1345 1357 hint=_(
1346 1358 b"enlarge your terminal or use --config ui.interface=text"
1347 1359 ),
1348 1360 )
1349 1361 return {
1350 1362 b'commit': (12, maxx),
1351 1363 b'help': (helplen, maxx),
1352 1364 b'main': (mainlen, maxx),
1353 1365 }
1354 1366
1355 1367 def display_pos_to_rule_pos(self, display_pos):
1356 1368 """Converts a position in display order to rule order.
1357 1369
1358 1370 The `display_pos` is the order from the top in display order, not
1359 1371 considering which items are currently visible on the screen. Thus,
1360 1372 `display_pos=0` is the item at the top (possibly after scrolling to
1361 1373 the top)
1362 1374 """
1363 1375 if self.later_on_top:
1364 1376 return len(self.rules) - 1 - display_pos
1365 1377 else:
1366 1378 return display_pos
1367 1379
1368 1380 def render_rules(self, rulesscr):
1369 1381 start = self.modes[MODE_RULES][b'line_offset']
1370 1382
1371 1383 conflicts = [r.ctx for r in self.rules if r.conflicts]
1372 1384 if len(conflicts) > 0:
1373 1385 line = b"potential conflict in %s" % b','.join(
1374 1386 map(pycompat.bytestr, conflicts)
1375 1387 )
1376 1388 addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
1377 1389
1378 1390 for display_pos in range(start, len(self.rules)):
1379 1391 y = display_pos - start
1380 1392 if y < 0 or y >= self.page_height:
1381 1393 continue
1382 1394 rule_pos = self.display_pos_to_rule_pos(display_pos)
1383 1395 rule = self.rules[rule_pos]
1384 1396 if len(rule.conflicts) > 0:
1385 1397 rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
1386 1398 else:
1387 1399 rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
1388 1400
1389 1401 if display_pos == self.selected:
1390 1402 rollcolor = COLOR_ROLL_SELECTED
1391 1403 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
1392 1404 elif display_pos == self.pos:
1393 1405 rollcolor = COLOR_ROLL_CURRENT
1394 1406 addln(
1395 1407 rulesscr,
1396 1408 y,
1397 1409 2,
1398 1410 rule,
1399 1411 curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
1400 1412 )
1401 1413 else:
1402 1414 rollcolor = COLOR_ROLL
1403 1415 addln(rulesscr, y, 2, rule)
1404 1416
1405 1417 if rule.action == b'roll':
1406 1418 rulesscr.addstr(
1407 1419 y,
1408 1420 2 + len(rule.prefix),
1409 1421 rule.desc,
1410 1422 curses.color_pair(rollcolor),
1411 1423 )
1412 1424
1413 1425 rulesscr.noutrefresh()
1414 1426
1415 1427 def render_string(self, win, output, diffcolors=False):
1416 1428 maxy, maxx = win.getmaxyx()
1417 1429 length = min(maxy - 1, len(output))
1418 1430 for y in range(0, length):
1419 1431 line = output[y]
1420 1432 if diffcolors:
1421 1433 if line and line[0] == b'+':
1422 1434 win.addstr(
1423 1435 y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
1424 1436 )
1425 1437 elif line and line[0] == b'-':
1426 1438 win.addstr(
1427 1439 y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
1428 1440 )
1429 1441 elif line.startswith(b'@@ '):
1430 1442 win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
1431 1443 else:
1432 1444 win.addstr(y, 0, line)
1433 1445 else:
1434 1446 win.addstr(y, 0, line)
1435 1447 win.noutrefresh()
1436 1448
1437 1449 def render_patch(self, win):
1438 1450 start = self.modes[MODE_PATCH][b'line_offset']
1439 1451 content = self.modes[MODE_PATCH][b'patchcontents']
1440 1452 self.render_string(win, content[start:], diffcolors=True)
1441 1453
1442 1454 def event(self, ch):
1443 1455 """Change state based on the current character input
1444 1456
1445 1457 This takes the current state and, based on the character input from
1446 1458 the user, changes the state accordingly.
1447 1459 """
1448 1460 oldpos = self.pos
1449 1461
1450 1462 if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
1451 1463 return E_RESIZE
1452 1464
1453 1465 lookup_ch = ch
1454 1466 if ch is not None and b'0' <= ch <= b'9':
1455 1467 lookup_ch = b'0'
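# every digit shares the single b'0' KEYTABLE entry; the b'goto' branch
# below re-reads the actual digit from ch to pick the target rule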
1456 1468
1457 1469 curmode, prevmode = self.mode
1458 1470 action = KEYTABLE[curmode].get(
1459 1471 lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
1460 1472 )
1461 1473 if action is None:
1462 1474 return
1463 1475 if action in (b'down', b'move-down'):
1464 1476 newpos = min(oldpos + 1, len(self.rules) - 1)
1465 1477 self.move_cursor(oldpos, newpos)
1466 1478 if self.selected is not None or action == b'move-down':
1467 1479 self.swap(oldpos, newpos)
1468 1480 elif action in (b'up', b'move-up'):
1469 1481 newpos = max(0, oldpos - 1)
1470 1482 self.move_cursor(oldpos, newpos)
1471 1483 if self.selected is not None or action == b'move-up':
1472 1484 self.swap(oldpos, newpos)
1473 1485 elif action == b'next-action':
1474 1486 self.cycle_action(oldpos, next=True)
1475 1487 elif action == b'prev-action':
1476 1488 self.cycle_action(oldpos, next=False)
1477 1489 elif action == b'select':
1478 1490 self.selected = oldpos if self.selected is None else None
1479 1491 self.make_selection(self.selected)
1480 1492 elif action == b'goto' and int(ch) < len(self.rules) <= 10:
1481 1493 newrule = next((r for r in self.rules if r.origpos == int(ch)))
1482 1494 self.move_cursor(oldpos, newrule.pos)
1483 1495 if self.selected is not None:
1484 1496 self.swap(oldpos, newrule.pos)
1485 1497 elif action.startswith(b'action-'):
1486 1498 self.change_action(oldpos, action[7:])
1487 1499 elif action == b'showpatch':
1488 1500 self.change_mode(MODE_PATCH if curmode != MODE_PATCH else prevmode)
1489 1501 elif action == b'help':
1490 1502 self.change_mode(MODE_HELP if curmode != MODE_HELP else prevmode)
1491 1503 elif action == b'quit':
1492 1504 return E_QUIT
1493 1505 elif action == b'histedit':
1494 1506 return E_HISTEDIT
1495 1507 elif action == b'page-down':
1496 1508 return E_PAGEDOWN
1497 1509 elif action == b'page-up':
1498 1510 return E_PAGEUP
1499 1511 elif action == b'line-down':
1500 1512 return E_LINEDOWN
1501 1513 elif action == b'line-up':
1502 1514 return E_LINEUP
1503 1515
1504 1516 def patch_contents(self):
1505 1517 repo = self.repo
1506 1518 rule = self.rules[self.display_pos_to_rule_pos(self.pos)]
1507 1519 displayer = logcmdutil.changesetdisplayer(
1508 1520 repo.ui,
1509 1521 repo,
1510 1522 {b"patch": True, b"template": b"status"},
1511 1523 buffered=True,
1512 1524 )
1513 1525 overrides = {(b'ui', b'verbose'): True}
1514 1526 with repo.ui.configoverride(overrides, source=b'histedit'):
1515 1527 displayer.show(rule.ctx)
1516 1528 displayer.close()
1517 1529 return displayer.hunk[rule.ctx.rev()].splitlines()
1518 1530
1519 1531 def move_cursor(self, oldpos, newpos):
1520 1532 """Change the rule/changeset that the cursor is pointing to, regardless of
1521 1533 current mode (you can switch between patches from the view patch window)."""
1522 1534 self.pos = newpos
1523 1535
1524 1536 mode, _ = self.mode
1525 1537 if mode == MODE_RULES:
1526 1538 # Scroll through the list by updating the view for MODE_RULES, so that
1527 1539 # even if we are not currently viewing the rules, switching back will
1528 1540 # result in the cursor's rule being visible.
1529 1541 modestate = self.modes[MODE_RULES]
1530 1542 if newpos < modestate[b'line_offset']:
1531 1543 modestate[b'line_offset'] = newpos
1532 1544 elif newpos > modestate[b'line_offset'] + self.page_height - 1:
1533 1545 modestate[b'line_offset'] = newpos - self.page_height + 1
1534 1546
1535 1547 # Reset the patch view region to the top of the new patch.
1536 1548 self.modes[MODE_PATCH][b'line_offset'] = 0
1537 1549
1538 1550 def change_mode(self, mode):
1539 1551 curmode, _ = self.mode
1540 1552 self.mode = (mode, curmode)
1541 1553 if mode == MODE_PATCH:
1542 1554 self.modes[MODE_PATCH][b'patchcontents'] = self.patch_contents()
1543 1555
1544 1556 def make_selection(self, pos):
1545 1557 self.selected = pos
1546 1558
1547 1559 def swap(self, oldpos, newpos):
1548 1560 """Swap two positions and calculate necessary conflicts in
1549 1561 O(|newpos-oldpos|) time"""
1550 1562 old_rule_pos = self.display_pos_to_rule_pos(oldpos)
1551 1563 new_rule_pos = self.display_pos_to_rule_pos(newpos)
1552 1564
1553 1565 rules = self.rules
1554 1566 assert 0 <= old_rule_pos < len(rules) and 0 <= new_rule_pos < len(rules)
1555 1567
1556 1568 rules[old_rule_pos], rules[new_rule_pos] = (
1557 1569 rules[new_rule_pos],
1558 1570 rules[old_rule_pos],
1559 1571 )
1560 1572
1561 1573 # TODO: swap should not know about histeditrule's internals
1562 1574 rules[new_rule_pos].pos = new_rule_pos
1563 1575 rules[old_rule_pos].pos = old_rule_pos
1564 1576
1565 1577 start = min(old_rule_pos, new_rule_pos)
1566 1578 end = max(old_rule_pos, new_rule_pos)
1567 1579 for r in pycompat.xrange(start, end + 1):
1568 1580 rules[new_rule_pos].checkconflicts(rules[r])
1569 1581 rules[old_rule_pos].checkconflicts(rules[r])
1570 1582
1571 1583 if self.selected:
1572 1584 self.make_selection(newpos)
1573 1585
1574 1586 def change_action(self, pos, action):
1575 1587 """Change the action state on the given position to the new action"""
1576 1588 assert 0 <= pos < len(self.rules)
1577 1589 self.rules[pos].action = action
1578 1590
1579 1591 def cycle_action(self, pos, next=False):
1580 1592 """Changes the action state the next or the previous action from
1581 1593 the action list"""
1582 1594 assert 0 <= pos < len(self.rules)
1583 1595 current = self.rules[pos].action
1584 1596
1585 1597 assert current in KEY_LIST
1586 1598
1587 1599 index = KEY_LIST.index(current)
1588 1600 if next:
1589 1601 index += 1
1590 1602 else:
1591 1603 index -= 1
1592 1604 self.change_action(pos, KEY_LIST[index % len(KEY_LIST)])
1593 1605
1594 1606 def change_view(self, delta, unit):
1595 1607 """Change the region of whatever is being viewed (a patch or the list of
1596 1608 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
1597 1609 mode, _ = self.mode
1598 1610 if mode != MODE_PATCH:
1599 1611 return
1600 1612 mode_state = self.modes[mode]
1601 1613 num_lines = len(mode_state[b'patchcontents'])
1602 1614 page_height = self.page_height
1603 1615 unit = page_height if unit == b'page' else 1
1604 1616 num_pages = 1 + (num_lines - 1) // page_height
1605 1617 max_offset = (num_pages - 1) * page_height
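# e.g. 25 patch lines with a 10-line page give 3 pages and a maximum
# line_offset of 20, so scrolling can reach the last page but not overshoot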
1606 1618 newline = mode_state[b'line_offset'] + delta * unit
1607 1619 mode_state[b'line_offset'] = max(0, min(max_offset, newline))
1608 1620
1609 1621
1610 1622 def _chisteditmain(repo, rules, stdscr):
1611 1623 try:
1612 1624 curses.use_default_colors()
1613 1625 except curses.error:
1614 1626 pass
1615 1627
1616 1628 # initialize color pattern
1617 1629 curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
1618 1630 curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
1619 1631 curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
1620 1632 curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
1621 1633 curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
1622 1634 curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
1623 1635 curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
1624 1636 curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
1625 1637 curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
1626 1638 curses.init_pair(
1627 1639 COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
1628 1640 )
1629 1641 curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)
1630 1642
1631 1643 # don't display the cursor
1632 1644 try:
1633 1645 curses.curs_set(0)
1634 1646 except curses.error:
1635 1647 pass
1636 1648
1637 1649 def drawvertwin(size, y, x):
1638 1650 win = curses.newwin(size[0], size[1], y, x)
1639 1651 y += size[0]
1640 1652 return win, y, x
1641 1653
1642 1654 state = _chistedit_state(repo, rules, stdscr)
1643 1655
1644 1656 # eventloop
1645 1657 ch = None
1646 1658 stdscr.clear()
1647 1659 stdscr.refresh()
1648 1660 while True:
1649 1661 oldmode, unused = state.mode
1650 1662 if oldmode == MODE_INIT:
1651 1663 state.change_mode(MODE_RULES)
1652 1664 e = state.event(ch)
1653 1665
1654 1666 if e == E_QUIT:
1655 1667 return False
1656 1668 if e == E_HISTEDIT:
1657 1669 return state.rules
1658 1670 else:
1659 1671 if e == E_RESIZE:
1660 1672 size = screen_size()
1661 1673 if size != stdscr.getmaxyx():
1662 1674 curses.resizeterm(*size)
1663 1675
1664 1676 sizes = state.layout()
1665 1677 curmode, unused = state.mode
1666 1678 if curmode != oldmode:
1667 1679 state.page_height = sizes[b'main'][0]
1668 1680 # Adjust the view to fit the current screen size.
1669 1681 state.move_cursor(state.pos, state.pos)
1670 1682
1671 1683 # Pack the windows against the top, each pane spread across the
1672 1684 # full width of the screen.
1673 1685 y, x = (0, 0)
1674 1686 helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
1675 1687 mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
1676 1688 commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
1677 1689
1678 1690 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
1679 1691 if e == E_PAGEDOWN:
1680 1692 state.change_view(+1, b'page')
1681 1693 elif e == E_PAGEUP:
1682 1694 state.change_view(-1, b'page')
1683 1695 elif e == E_LINEDOWN:
1684 1696 state.change_view(+1, b'line')
1685 1697 elif e == E_LINEUP:
1686 1698 state.change_view(-1, b'line')
1687 1699
1688 1700 # start rendering
1689 1701 commitwin.erase()
1690 1702 helpwin.erase()
1691 1703 mainwin.erase()
1692 1704 if curmode == MODE_PATCH:
1693 1705 state.render_patch(mainwin)
1694 1706 elif curmode == MODE_HELP:
1695 1707 state.render_string(mainwin, __doc__.strip().splitlines())
1696 1708 else:
1697 1709 state.render_rules(mainwin)
1698 1710 state.render_commit(commitwin)
1699 1711 state.render_help(helpwin)
1700 1712 curses.doupdate()
1701 1713 # done rendering
1702 1714 ch = encoding.strtolocal(stdscr.getkey())
1703 1715
1704 1716
1705 1717 def _chistedit(ui, repo, freeargs, opts):
1706 1718 """interactively edit changeset history via a curses interface
1707 1719
1708 1720 Provides an ncurses interface to histedit. Press ? in chistedit mode
1709 1721 to see extensive help. Requires python-curses to be installed."""
1710 1722
1711 1723 if curses is None:
1712 1724 raise error.Abort(_(b"Python curses library required"))
1713 1725
1714 1726 # disable color
1715 1727 ui._colormode = None
1716 1728
1717 1729 try:
1718 1730 keep = opts.get(b'keep')
1719 1731 revs = opts.get(b'rev', [])[:]
1720 1732 cmdutil.checkunfinished(repo)
1721 1733 cmdutil.bailifchanged(repo)
1722 1734
1723 1735 revs.extend(freeargs)
1724 1736 if not revs:
1725 1737 defaultrev = destutil.desthistedit(ui, repo)
1726 1738 if defaultrev is not None:
1727 1739 revs.append(defaultrev)
1728 1740 if len(revs) != 1:
1729 1741 raise error.InputError(
1730 1742 _(b'histedit requires exactly one ancestor revision')
1731 1743 )
1732 1744
1733 1745 rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
1734 1746 if len(rr) != 1:
1735 1747 raise error.InputError(
1736 1748 _(
1737 1749 b'The specified revisions must have '
1738 1750 b'exactly one common root'
1739 1751 )
1740 1752 )
1741 1753 root = rr[0].node()
1742 1754
1743 1755 topmost = repo.dirstate.p1()
1744 1756 revs = between(repo, root, topmost, keep)
1745 1757 if not revs:
1746 1758 raise error.InputError(
1747 1759 _(b'%s is not an ancestor of working directory') % short(root)
1748 1760 )
1749 1761
1750 1762 rules = []
1751 1763 for i, r in enumerate(revs):
1752 1764 rules.append(histeditrule(ui, repo[r], i))
1753 1765 with util.with_lc_ctype():
1754 1766 rc = curses.wrapper(functools.partial(_chisteditmain, repo, rules))
1755 1767 curses.echo()
1756 1768 curses.endwin()
1757 1769 if rc is False:
1758 1770 ui.write(_(b"histedit aborted\n"))
1759 1771 return 0
1760 1772 if type(rc) is list:
1761 1773 ui.status(_(b"performing changes\n"))
1762 1774 rules = makecommands(rc)
1763 1775 with repo.vfs(b'chistedit', b'w+') as fp:
1764 1776 for r in rules:
1765 1777 fp.write(r)
1766 1778 opts[b'commands'] = fp.name
1767 1779 return _texthistedit(ui, repo, freeargs, opts)
1768 1780 except KeyboardInterrupt:
1769 1781 pass
1770 1782 return -1
1771 1783
1772 1784
1773 1785 @command(
1774 1786 b'histedit',
1775 1787 [
1776 1788 (
1777 1789 b'',
1778 1790 b'commands',
1779 1791 b'',
1780 1792 _(b'read history edits from the specified file'),
1781 1793 _(b'FILE'),
1782 1794 ),
1783 1795 (b'c', b'continue', False, _(b'continue an edit already in progress')),
1784 1796 (b'', b'edit-plan', False, _(b'edit remaining actions list')),
1785 1797 (
1786 1798 b'k',
1787 1799 b'keep',
1788 1800 False,
1789 1801 _(b"don't strip old nodes after edit is complete"),
1790 1802 ),
1791 1803 (b'', b'abort', False, _(b'abort an edit in progress')),
1792 1804 (b'o', b'outgoing', False, _(b'changesets not found in destination')),
1793 1805 (
1794 1806 b'f',
1795 1807 b'force',
1796 1808 False,
1797 1809 _(b'force outgoing even for unrelated repositories'),
1798 1810 ),
1799 1811 (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
1800 1812 ]
1801 1813 + cmdutil.formatteropts,
1802 1814 _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
1803 1815 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
1804 1816 )
1805 1817 def histedit(ui, repo, *freeargs, **opts):
1806 1818 """interactively edit changeset history
1807 1819
1808 1820 This command lets you edit a linear series of changesets (up to
1809 1821 and including the working directory, which should be clean).
1810 1822 You can:
1811 1823
1812 1824 - `pick` to [re]order a changeset
1813 1825
1814 1826 - `drop` to omit a changeset
1815 1827
1816 1828 - `mess` to reword the changeset commit message
1817 1829
1818 1830 - `fold` to combine it with the preceding changeset (using the later date)
1819 1831
1820 1832 - `roll` like fold, but discarding this commit's description and date
1821 1833
1822 1834 - `edit` to edit this changeset (preserving date)
1823 1835
1824 1836 - `base` to checkout changeset and apply further changesets from there
1825 1837
1826 1838 There are a number of ways to select the root changeset:
1827 1839
1828 1840 - Specify ANCESTOR directly
1829 1841
1830 1842 - Use --outgoing -- it will be the first linear changeset not
1831 1843 included in destination. (See :hg:`help config.paths.default-push`)
1832 1844
1833 1845 - Otherwise, the value from the "histedit.defaultrev" config option
1834 1846 is used as a revset to select the base revision when ANCESTOR is not
1835 1847 specified. The first revision returned by the revset is used. By
1836 1848 default, this selects the editable history that is unique to the
1837 1849 ancestry of the working directory.
1838 1850
1839 1851 .. container:: verbose
1840 1852
1841 1853 If you use --outgoing, this command will abort if there are ambiguous
1842 1854 outgoing revisions. For example, if there are multiple branches
1843 1855 containing outgoing revisions.
1844 1856
1845 1857 Use "min(outgoing() and ::.)" or similar revset specification
1846 1858 instead of --outgoing to specify the edit target revision exactly in
1847 1859 such an ambiguous situation. See :hg:`help revsets` for details about
1848 1860 selecting revisions.
1849 1861
1850 1862 .. container:: verbose
1851 1863
1852 1864 Examples:
1853 1865
1854 1866 - A number of changes have been made.
1855 1867 Revision 3 is no longer needed.
1856 1868
1857 1869 Start history editing from revision 3::
1858 1870
1859 1871 hg histedit -r 3
1860 1872
1861 1873 An editor opens, containing the list of revisions,
1862 1874 with specific actions specified::
1863 1875
1864 1876 pick 5339bf82f0ca 3 Zworgle the foobar
1865 1877 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1866 1878 pick 0a9639fcda9d 5 Morgify the cromulancy
1867 1879
1868 1880 Additional information about the possible actions
1869 1881 to take appears below the list of revisions.
1870 1882
1871 1883 To remove revision 3 from the history,
1872 1884 its action (at the beginning of the relevant line)
1873 1885 is changed to 'drop'::
1874 1886
1875 1887 drop 5339bf82f0ca 3 Zworgle the foobar
1876 1888 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1877 1889 pick 0a9639fcda9d 5 Morgify the cromulancy
1878 1890
1879 1891 - A number of changes have been made.
1880 1892 Revisions 2 and 4 need to be swapped.
1881 1893
1882 1894 Start history editing from revision 2::
1883 1895
1884 1896 hg histedit -r 2
1885 1897
1886 1898 An editor opens, containing the list of revisions,
1887 1899 with specific actions specified::
1888 1900
1889 1901 pick 252a1af424ad 2 Blorb a morgwazzle
1890 1902 pick 5339bf82f0ca 3 Zworgle the foobar
1891 1903 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1892 1904
1893 1905 To swap revisions 2 and 4, their lines are swapped
1894 1906 in the editor::
1895 1907
1896 1908 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1897 1909 pick 5339bf82f0ca 3 Zworgle the foobar
1898 1910 pick 252a1af424ad 2 Blorb a morgwazzle
1899 1911
1900 1912 Returns 0 on success, 1 if user intervention is required (not only
1901 1913 for intentional "edit" command, but also for resolving unexpected
1902 1914 conflicts).
1903 1915 """
1904 1916 opts = pycompat.byteskwargs(opts)
1905 1917
1906 1918 # kludge: _chistedit only works for starting an edit, not aborting
1907 1919 # or continuing, so fall back to regular _texthistedit for those
1908 1920 # operations.
1909 1921 if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
1910 1922 return _chistedit(ui, repo, freeargs, opts)
1911 1923 return _texthistedit(ui, repo, freeargs, opts)
1912 1924
1913 1925
1914 1926 def _texthistedit(ui, repo, freeargs, opts):
1915 1927 state = histeditstate(repo)
1916 1928 with repo.wlock() as wlock, repo.lock() as lock:
1917 1929 state.wlock = wlock
1918 1930 state.lock = lock
1919 1931 _histedit(ui, repo, state, freeargs, opts)
1920 1932
1921 1933
1922 1934 goalcontinue = b'continue'
1923 1935 goalabort = b'abort'
1924 1936 goaleditplan = b'edit-plan'
1925 1937 goalnew = b'new'
1926 1938
1927 1939
1928 1940 def _getgoal(opts):
1929 1941 if opts.get(b'continue'):
1930 1942 return goalcontinue
1931 1943 if opts.get(b'abort'):
1932 1944 return goalabort
1933 1945 if opts.get(b'edit_plan'):
1934 1946 return goaleditplan
1935 1947 return goalnew
1936 1948
1937 1949
1938 1950 def _readfile(ui, path):
1939 1951 if path == b'-':
1940 1952 with ui.timeblockedsection(b'histedit'):
1941 1953 return ui.fin.read()
1942 1954 else:
1943 1955 with open(path, b'rb') as f:
1944 1956 return f.read()
1945 1957
1946 1958
1947 1959 def _validateargs(ui, repo, freeargs, opts, goal, rules, revs):
1948 1960 # TODO only abort if we try to histedit mq patches, not just
1949 1961 # blanket-abort whenever mq patches are applied somewhere
1950 1962 mq = getattr(repo, 'mq', None)
1951 1963 if mq and mq.applied:
1952 1964 raise error.StateError(_(b'source has mq patches applied'))
1953 1965
1954 1966 # basic argument incompatibility processing
1955 1967 outg = opts.get(b'outgoing')
1956 1968 editplan = opts.get(b'edit_plan')
1957 1969 abort = opts.get(b'abort')
1958 1970 force = opts.get(b'force')
1959 1971 if force and not outg:
1960 1972 raise error.InputError(_(b'--force only allowed with --outgoing'))
1961 1973 if goal == b'continue':
1962 1974 if any((outg, abort, revs, freeargs, rules, editplan)):
1963 1975 raise error.InputError(_(b'no arguments allowed with --continue'))
1964 1976 elif goal == b'abort':
1965 1977 if any((outg, revs, freeargs, rules, editplan)):
1966 1978 raise error.InputError(_(b'no arguments allowed with --abort'))
1967 1979 elif goal == b'edit-plan':
1968 1980 if any((outg, revs, freeargs)):
1969 1981 raise error.InputError(
1970 1982 _(b'only --commands argument allowed with --edit-plan')
1971 1983 )
1972 1984 else:
1973 1985 if outg:
1974 1986 if revs:
1975 1987 raise error.InputError(
1976 1988 _(b'no revisions allowed with --outgoing')
1977 1989 )
1978 1990 if len(freeargs) > 1:
1979 1991 raise error.InputError(
1980 1992 _(b'only one repo argument allowed with --outgoing')
1981 1993 )
1982 1994 else:
1983 1995 revs.extend(freeargs)
1984 1996 if len(revs) == 0:
1985 1997 defaultrev = destutil.desthistedit(ui, repo)
1986 1998 if defaultrev is not None:
1987 1999 revs.append(defaultrev)
1988 2000
1989 2001 if len(revs) != 1:
1990 2002 raise error.InputError(
1991 2003 _(b'histedit requires exactly one ancestor revision')
1992 2004 )
1993 2005
1994 2006
1995 2007 def _histedit(ui, repo, state, freeargs, opts):
1996 2008 fm = ui.formatter(b'histedit', opts)
1997 2009 fm.startitem()
1998 2010 goal = _getgoal(opts)
1999 2011 revs = opts.get(b'rev', [])
2000 2012 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
2001 2013 rules = opts.get(b'commands', b'')
2002 2014 state.keep = opts.get(b'keep', False)
2003 2015
2004 2016 _validateargs(ui, repo, freeargs, opts, goal, rules, revs)
2005 2017
2006 2018 hastags = False
2007 2019 if revs:
2008 2020 revs = logcmdutil.revrange(repo, revs)
2009 2021 ctxs = [repo[rev] for rev in revs]
2010 2022 for ctx in ctxs:
2011 2023 tags = [tag for tag in ctx.tags() if tag != b'tip']
2012 2024 if not hastags:
2013 2025 hastags = len(tags)
2014 2026 if hastags:
2015 2027 if ui.promptchoice(
2016 2028 _(
2017 2029 b'warning: tags associated with the given'
2018 2030 b' changeset will be lost after histedit.\n'
2019 2031 b'do you want to continue (yN)? $$ &Yes $$ &No'
2020 2032 ),
2021 2033 default=1,
2022 2034 ):
2023 2035 raise error.CanceledError(_(b'histedit cancelled\n'))
2024 2036 # rebuild state
2025 2037 if goal == goalcontinue:
2026 2038 state.read()
2027 2039 state = bootstrapcontinue(ui, state, opts)
2028 2040 elif goal == goaleditplan:
2029 2041 _edithisteditplan(ui, repo, state, rules)
2030 2042 return
2031 2043 elif goal == goalabort:
2032 2044 _aborthistedit(ui, repo, state, nobackup=nobackup)
2033 2045 return
2034 2046 else:
2035 2047 # goal == goalnew
2036 2048 _newhistedit(ui, repo, state, revs, freeargs, opts)
2037 2049
2038 2050 _continuehistedit(ui, repo, state)
2039 2051 _finishhistedit(ui, repo, state, fm)
2040 2052 fm.end()
2041 2053
2042 2054
2043 2055 def _continuehistedit(ui, repo, state):
2044 2056 """This function runs after either:
2045 2057 - bootstrapcontinue (if the goal is 'continue')
2046 2058 - _newhistedit (if the goal is 'new')
2047 2059 """
2048 2060 # preprocess rules so that we can hide inner folds from the user
2049 2061 # and only show one editor
2050 2062 actions = state.actions[:]
2051 2063 for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
2052 2064 if action.verb == b'fold' and nextact and nextact.verb == b'fold':
2053 2065 state.actions[idx].__class__ = _multifold
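# e.g. a plan with three consecutive folds rewrites the first two to
# _multifold (which skips the prompt), so the message editor fires only
# once, on the final fold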
2054 2066
2055 2067 # Force an initial state file write, so the user can run --abort/continue
2056 2068 # even if there's an exception before the first transaction serialize.
2057 2069 state.write()
2058 2070
2059 2071 tr = None
2060 2072 # Don't use singletransaction by default since it rolls the entire
2061 2073 # transaction back if an unexpected exception happens (like a
2062 2074 # pretxncommit hook throws, or the user aborts the commit msg editor).
2063 2075 if ui.configbool(b"histedit", b"singletransaction"):
2064 2076 # Don't use a 'with' for the transaction, since actions may close
2065 2077 # and reopen a transaction. For example, if the action executes an
2066 2078 # external process it may choose to commit the transaction first.
2067 2079 tr = repo.transaction(b'histedit')
2068 2080 progress = ui.makeprogress(
2069 2081 _(b"editing"), unit=_(b'changes'), total=len(state.actions)
2070 2082 )
2071 2083 with progress, util.acceptintervention(tr):
2072 2084 while state.actions:
2073 2085 state.write(tr=tr)
2074 2086 actobj = state.actions[0]
2075 2087 progress.increment(item=actobj.torule())
2076 2088 ui.debug(
2077 2089 b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
2078 2090 )
2079 2091 parentctx, replacement_ = actobj.run()
2080 2092 state.parentctxnode = parentctx.node()
2081 2093 state.replacements.extend(replacement_)
2082 2094 state.actions.pop(0)
2083 2095
2084 2096 state.write()
2085 2097
2086 2098
2087 2099 def _finishhistedit(ui, repo, state, fm):
2088 2100 """This action runs when histedit is finishing its session"""
2089 2101 mergemod.update(repo[state.parentctxnode])
2090 2102
2091 2103 mapping, tmpnodes, created, ntm = processreplacement(state)
2092 2104 if mapping:
2093 2105 for prec, succs in pycompat.iteritems(mapping):
2094 2106 if not succs:
2095 2107 ui.debug(b'histedit: %s is dropped\n' % short(prec))
2096 2108 else:
2097 2109 ui.debug(
2098 2110 b'histedit: %s is replaced by %s\n'
2099 2111 % (short(prec), short(succs[0]))
2100 2112 )
2101 2113 if len(succs) > 1:
2102 2114 m = b'histedit: %s'
2103 2115 for n in succs[1:]:
2104 2116 ui.debug(m % short(n))
2105 2117
2106 2118 if not state.keep:
2107 2119 if mapping:
2108 2120 movetopmostbookmarks(repo, state.topmost, ntm)
2109 2121 # TODO update mq state
2110 2122 else:
2111 2123 mapping = {}
2112 2124
2113 2125 for n in tmpnodes:
2114 2126 if n in repo:
2115 2127 mapping[n] = ()
2116 2128
2117 2129 # remove entries about unknown nodes
2118 2130 has_node = repo.unfiltered().changelog.index.has_node
2119 2131 mapping = {
2120 2132 k: v
2121 2133 for k, v in mapping.items()
2122 2134 if has_node(k) and all(has_node(n) for n in v)
2123 2135 }
2124 2136 scmutil.cleanupnodes(repo, mapping, b'histedit')
2125 2137 hf = fm.hexfunc
2126 2138 fl = fm.formatlist
2127 2139 fd = fm.formatdict
2128 2140 nodechanges = fd(
2129 2141 {
2130 2142 hf(oldn): fl([hf(n) for n in newn], name=b'node')
2131 2143 for oldn, newn in pycompat.iteritems(mapping)
2132 2144 },
2133 2145 key=b"oldnode",
2134 2146 value=b"newnodes",
2135 2147 )
2136 2148 fm.data(nodechanges=nodechanges)
2137 2149
2138 2150 state.clear()
2139 2151 if os.path.exists(repo.sjoin(b'undo')):
2140 2152 os.unlink(repo.sjoin(b'undo'))
2141 2153 if repo.vfs.exists(b'histedit-last-edit.txt'):
2142 2154 repo.vfs.unlink(b'histedit-last-edit.txt')
2143 2155
2144 2156
2145 2157 def _aborthistedit(ui, repo, state, nobackup=False):
2146 2158 try:
2147 2159 state.read()
2148 2160 __, leafs, tmpnodes, __ = processreplacement(state)
2149 2161 ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))
2150 2162
2151 2163 # Recover our old commits if necessary
2152 2164 if not state.topmost in repo and state.backupfile:
2153 2165 backupfile = repo.vfs.join(state.backupfile)
2154 2166 f = hg.openpath(ui, backupfile)
2155 2167 gen = exchange.readbundle(ui, f, backupfile)
2156 2168 with repo.transaction(b'histedit.abort') as tr:
2157 2169 bundle2.applybundle(
2158 2170 repo,
2159 2171 gen,
2160 2172 tr,
2161 2173 source=b'histedit',
2162 2174 url=b'bundle:' + backupfile,
2163 2175 )
2164 2176
2165 2177 os.remove(backupfile)
2166 2178
2167 2179 # check whether we should update away
2168 2180 if repo.unfiltered().revs(
2169 2181 b'parents() and (%n or %ln::)',
2170 2182 state.parentctxnode,
2171 2183 leafs | tmpnodes,
2172 2184 ):
2173 2185 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
2174 2186 cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
2175 2187 cleanupnode(ui, repo, leafs, nobackup=nobackup)
2176 2188 except Exception:
2177 2189 if state.inprogress():
2178 2190 ui.warn(
2179 2191 _(
2180 2192 b'warning: encountered an exception during histedit '
2181 2193 b'--abort; the repository may not have been completely '
2182 2194 b'cleaned up\n'
2183 2195 )
2184 2196 )
2185 2197 raise
2186 2198 finally:
2187 2199 state.clear()
2188 2200
2189 2201
2190 2202 def hgaborthistedit(ui, repo):
2191 2203 state = histeditstate(repo)
2192 2204 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
2193 2205 with repo.wlock() as wlock, repo.lock() as lock:
2194 2206 state.wlock = wlock
2195 2207 state.lock = lock
2196 2208 _aborthistedit(ui, repo, state, nobackup=nobackup)
2197 2209
2198 2210
2199 2211 def _edithisteditplan(ui, repo, state, rules):
2200 2212 state.read()
2201 2213 if not rules:
2202 2214 comment = geteditcomment(
2203 2215 ui, short(state.parentctxnode), short(state.topmost)
2204 2216 )
2205 2217 rules = ruleeditor(repo, ui, state.actions, comment)
2206 2218 else:
2207 2219 rules = _readfile(ui, rules)
2208 2220 actions = parserules(rules, state)
2209 2221 ctxs = [repo[act.node] for act in state.actions if act.node]
2210 2222 warnverifyactions(ui, repo, actions, state, ctxs)
2211 2223 state.actions = actions
2212 2224 state.write()
2213 2225
2214 2226
2215 2227 def _newhistedit(ui, repo, state, revs, freeargs, opts):
2216 2228 outg = opts.get(b'outgoing')
2217 2229 rules = opts.get(b'commands', b'')
2218 2230 force = opts.get(b'force')
2219 2231
2220 2232 cmdutil.checkunfinished(repo)
2221 2233 cmdutil.bailifchanged(repo)
2222 2234
2223 2235 topmost = repo.dirstate.p1()
2224 2236 if outg:
2225 2237 if freeargs:
2226 2238 remote = freeargs[0]
2227 2239 else:
2228 2240 remote = None
2229 2241 root = findoutgoing(ui, repo, remote, force, opts)
2230 2242 else:
2231 2243 rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
2232 2244 if len(rr) != 1:
2233 2245 raise error.InputError(
2234 2246 _(
2235 2247 b'The specified revisions must have '
2236 2248 b'exactly one common root'
2237 2249 )
2238 2250 )
2239 2251 root = rr[0].node()
2240 2252
2241 2253 revs = between(repo, root, topmost, state.keep)
2242 2254 if not revs:
2243 2255 raise error.InputError(
2244 2256 _(b'%s is not an ancestor of working directory') % short(root)
2245 2257 )
2246 2258
2247 2259 ctxs = [repo[r] for r in revs]
2248 2260
2249 2261 wctx = repo[None]
2250 2262 # Please don't ask me why `ancestors` is this value. I figured it
2251 2263 # out with print-debugging, not by actually understanding what the
2252 2264 # merge code is doing. :(
2253 2265 ancs = [repo[b'.']]
2254 2266 # Sniff-test to make sure we won't collide with untracked files in
2255 2267 # the working directory. If we don't do this, we can get a
2256 2268 # collision after we've started histedit and backing out gets ugly
2257 2269 # for everyone, especially the user.
2258 2270 for c in [ctxs[0].p1()] + ctxs:
2259 2271 try:
2260 2272 mergemod.calculateupdates(
2261 2273 repo,
2262 2274 wctx,
2263 2275 c,
2264 2276 ancs,
2265 2277 # These parameters were determined by print-debugging
2266 2278 # what happens later on inside histedit.
2267 2279 branchmerge=False,
2268 2280 force=False,
2269 2281 acceptremote=False,
2270 2282 followcopies=False,
2271 2283 )
2272 2284 except error.Abort:
2273 2285 raise error.StateError(
2274 2286 _(
2275 2287 b"untracked files in working directory conflict with files in %s"
2276 2288 )
2277 2289 % c
2278 2290 )
2279 2291
2280 2292 if not rules:
2281 2293 comment = geteditcomment(ui, short(root), short(topmost))
2282 2294 actions = [pick(state, r) for r in revs]
2283 2295 rules = ruleeditor(repo, ui, actions, comment)
2284 2296 else:
2285 2297 rules = _readfile(ui, rules)
2286 2298 actions = parserules(rules, state)
2287 2299 warnverifyactions(ui, repo, actions, state, ctxs)
2288 2300
2289 2301 parentctxnode = repo[root].p1().node()
2290 2302
2291 2303 state.parentctxnode = parentctxnode
2292 2304 state.actions = actions
2293 2305 state.topmost = topmost
2294 2306 state.replacements = []
2295 2307
2296 2308 ui.log(
2297 2309 b"histedit",
2298 2310 b"%d actions to histedit\n",
2299 2311 len(actions),
2300 2312 histedit_num_actions=len(actions),
2301 2313 )
2302 2314
2303 2315 # Create a backup so we can always abort completely.
2304 2316 backupfile = None
2305 2317 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2306 2318 backupfile = repair.backupbundle(
2307 2319 repo, [parentctxnode], [topmost], root, b'histedit'
2308 2320 )
2309 2321 state.backupfile = backupfile
2310 2322
2311 2323
2312 2324 def _getsummary(ctx):
2313 2325 # a common pattern is to extract the summary but default to the empty
2314 2326 # string
2315 2327 summary = ctx.description() or b''
2316 2328 if summary:
2317 2329 summary = summary.splitlines()[0]
2318 2330 return summary
2319 2331
2320 2332
2321 2333 def bootstrapcontinue(ui, state, opts):
2322 2334 repo = state.repo
2323 2335
2324 2336 ms = mergestatemod.mergestate.read(repo)
2325 2337 mergeutil.checkunresolved(ms)
2326 2338
2327 2339 if state.actions:
2328 2340 actobj = state.actions.pop(0)
2329 2341
2330 2342 if _isdirtywc(repo):
2331 2343 actobj.continuedirty()
2332 2344 if _isdirtywc(repo):
2333 2345 abortdirty()
2334 2346
2335 2347 parentctx, replacements = actobj.continueclean()
2336 2348
2337 2349 state.parentctxnode = parentctx.node()
2338 2350 state.replacements.extend(replacements)
2339 2351
2340 2352 return state
2341 2353
2342 2354
2343 2355 def between(repo, old, new, keep):
2344 2356 """select and validate the set of revision to edit
2345 2357
2346 2358 When keep is false, the specified set can't have children."""
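# the revset b'%n::%n' selects old, new, and the changesets between them
# in the DAG, returned in ascending (oldest-first) revision order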
2347 2359 revs = repo.revs(b'%n::%n', old, new)
2348 2360 if revs and not keep:
2349 2361 rewriteutil.precheck(repo, revs, b'edit')
2350 2362 if repo.revs(b'(%ld) and merge()', revs):
2351 2363 raise error.StateError(
2352 2364 _(b'cannot edit history that contains merges')
2353 2365 )
2354 2366 return pycompat.maplist(repo.changelog.node, revs)
2355 2367
2356 2368
2357 2369 def ruleeditor(repo, ui, actions, editcomment=b""):
2358 2370 """open an editor to edit rules
2359 2371
2360 2372 rules are in the format [ [act, ctx], ...] like in state.rules
2361 2373 """
2362 2374 if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
2363 2375 newact = util.sortdict()
2364 2376 for act in actions:
2365 2377 ctx = repo[act.node]
2366 2378 summary = _getsummary(ctx)
2367 2379 fword = summary.split(b' ', 1)[0].lower()
2368 2380 added = False
2369 2381
2370 2382 # if it doesn't end with the special character '!' just skip this
2371 2383 if fword.endswith(b'!'):
2372 2384 fword = fword[:-1]
2373 2385 if fword in primaryactions | secondaryactions | tertiaryactions:
2374 2386 act.verb = fword
2375 2387 # get the target summary
2376 2388 tsum = summary[len(fword) + 1 :].lstrip()
2377 2389 # safe but slow: reverse iterate over the actions so we
2378 2390 # don't clash on two commits having the same summary
2379 2391 for na, l in reversed(list(pycompat.iteritems(newact))):
2380 2392 actx = repo[na.node]
2381 2393 asum = _getsummary(actx)
2382 2394 if asum == tsum:
2383 2395 added = True
2384 2396 l.append(act)
2385 2397 break
2386 2398
2387 2399 if not added:
2388 2400 newact[act] = []
2389 2401
2390 2402 # copy over and flatten the new list
2391 2403 actions = []
2392 2404 for na, l in pycompat.iteritems(newact):
2393 2405 actions.append(na)
2394 2406 actions += l
2395 2407
2396 2408 rules = b'\n'.join([act.torule() for act in actions])
2397 2409 rules += b'\n\n'
2398 2410 rules += editcomment
2399 2411 rules = ui.edit(
2400 2412 rules,
2401 2413 ui.username(),
2402 2414 {b'prefix': b'histedit'},
2403 2415 repopath=repo.path,
2404 2416 action=b'histedit',
2405 2417 )
2406 2418
2407 2419 # Save edit rules in .hg/histedit-last-edit.txt in case
2408 2420 # the user needs to ask for help after something
2409 2421 # surprising happens.
2410 2422 with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
2411 2423 f.write(rules)
2412 2424
2413 2425 return rules
2414 2426
2415 2427
2416 2428 def parserules(rules, state):
2417 2429 """Read the histedit rules string and return list of action objects"""
2418 2430 rules = [
2419 2431 l
2420 2432 for l in (r.strip() for r in rules.splitlines())
2421 2433 if l and not l.startswith(b'#')
2422 2434 ]
2423 2435 actions = []
2424 2436 for r in rules:
2425 2437 if b' ' not in r:
2426 2438 raise error.ParseError(_(b'malformed line "%s"') % r)
2427 2439 verb, rest = r.split(b' ', 1)
2428 2440
2429 2441 if verb not in actiontable:
2430 2442 raise error.ParseError(_(b'unknown action "%s"') % verb)
2431 2443
2432 2444 action = actiontable[verb].fromrule(state, rest)
2433 2445 actions.append(action)
2434 2446 return actions
2435 2447
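# For orientation, parserules() consumes the text a user saves from the
# histedit editor; a plausible (hypothetical) input would be:
#
#   pick c561b4e977df 7 add beta
#   fold 7c2fd3b9020c 8 add gamma
#   drop 042e38bd0bfc 9 add delta
#
# Blank lines and '#' comments are dropped, each remaining line is split
# into a verb looked up in actiontable and a free-form rest, and the
# verb's fromrule() builds the action object.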
2436 2448
2437 2449 def warnverifyactions(ui, repo, actions, state, ctxs):
2438 2450 try:
2439 2451 verifyactions(actions, state, ctxs)
2440 2452 except error.ParseError:
2441 2453 if repo.vfs.exists(b'histedit-last-edit.txt'):
2442 2454 ui.warn(
2443 2455 _(
2444 2456 b'warning: histedit rules saved '
2445 2457 b'to: .hg/histedit-last-edit.txt\n'
2446 2458 )
2447 2459 )
2448 2460 raise
2449 2461
2450 2462
2451 2463 def verifyactions(actions, state, ctxs):
2452 2464 """Verify that there exists exactly one action per given changeset and
2453 2465 other constraints.
2454 2466
2455 2467 Will abort if there are to many or too few rules, a malformed rule,
2456 2468 or a rule on a changeset outside of the user-given range.
2457 2469 """
2458 2470 expected = {c.node() for c in ctxs}
2459 2471 seen = set()
2460 2472 prev = None
2461 2473
2462 2474 if actions and actions[0].verb in [b'roll', b'fold']:
2463 2475 raise error.ParseError(
2464 2476 _(b'first changeset cannot use verb "%s"') % actions[0].verb
2465 2477 )
2466 2478
2467 2479 for action in actions:
2468 2480 action.verify(prev, expected, seen)
2469 2481 prev = action
2470 2482 if action.node is not None:
2471 2483 seen.add(action.node)
2472 2484 missing = sorted(expected - seen) # sort to stabilize output
2473 2485
2474 2486 if state.repo.ui.configbool(b'histedit', b'dropmissing'):
2475 2487 if len(actions) == 0:
2476 2488 raise error.ParseError(
2477 2489 _(b'no rules provided'),
2478 2490 hint=_(b'use strip extension to remove commits'),
2479 2491 )
2480 2492
2481 2493 drops = [drop(state, n) for n in missing]
2482 2494 # put them at the beginning so they execute immediately and
2483 2495 # don't show up in the edit plan later
2484 2496 actions[:0] = drops
2485 2497 elif missing:
2486 2498 raise error.ParseError(
2487 2499 _(b'missing rules for changeset %s') % short(missing[0]),
2488 2500 hint=_(
2489 2501 b'use "drop %s" to discard, see also: '
2490 2502 b"'hg help -e histedit.config'"
2491 2503 )
2492 2504 % short(missing[0]),
2493 2505 )
2494 2506
2495 2507
2496 2508 def adjustreplacementsfrommarkers(repo, oldreplacements):
2497 2509 """Adjust replacements from obsolescence markers
2498 2510
2499 2511 Replacements structure is originally generated based on
2500 2512 histedit's state and does not account for changes that are
2501 2513 not recorded there. This function fixes that by adding
2502 2514 data read from obsolescence markers"""
2503 2515 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2504 2516 return oldreplacements
2505 2517
2506 2518 unfi = repo.unfiltered()
2507 2519 get_rev = unfi.changelog.index.get_rev
2508 2520 obsstore = repo.obsstore
2509 2521 newreplacements = list(oldreplacements)
2510 2522 oldsuccs = [r[1] for r in oldreplacements]
2511 2523 # successors that have already been added to succstocheck once
2512 2524 seensuccs = set().union(
2513 2525 *oldsuccs
2514 2526 ) # create a set from an iterable of tuples
2515 2527 succstocheck = list(seensuccs)
2516 2528 while succstocheck:
2517 2529 n = succstocheck.pop()
2518 2530 missing = get_rev(n) is None
2519 2531 markers = obsstore.successors.get(n, ())
2520 2532 if missing and not markers:
2521 2533 # dead end, mark it as such
2522 2534 newreplacements.append((n, ()))
2523 2535 for marker in markers:
2524 2536 nsuccs = marker[1]
2525 2537 newreplacements.append((n, nsuccs))
2526 2538 for nsucc in nsuccs:
2527 2539 if nsucc not in seensuccs:
2528 2540 seensuccs.add(nsucc)
2529 2541 succstocheck.append(nsucc)
2530 2542
2531 2543 return newreplacements
2532 2544
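# Rough shape of the data handled above, spelled out as an assumption
# for readability: each replacement is a (precursor, (successor, ...))
# tuple. The walk follows successors transitively through the obsstore,
# so a chain recorded outside histedit as b -> c still contributes
# (b, (c,)) to newreplacements, and a successor that vanished without
# markers is recorded as a dead end: (n, ()).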
2533 2545
2534 2546 def processreplacement(state):
2535 2547 """process the list of replacements to return
2536 2548
2537 2549 1) the final mapping between original and created nodes
2538 2550 2) the list of temporary nodes created by histedit
2539 2551 3) the list of new commits created by histedit"""
2540 2552 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
2541 2553 allsuccs = set()
2542 2554 replaced = set()
2543 2555 fullmapping = {}
2544 2556 # initialize basic set
2545 2557 # fullmapping records all operations recorded in replacement
2546 2558 for rep in replacements:
2547 2559 allsuccs.update(rep[1])
2548 2560 replaced.add(rep[0])
2549 2561 fullmapping.setdefault(rep[0], set()).update(rep[1])
2550 2562 new = allsuccs - replaced
2551 2563 tmpnodes = allsuccs & replaced
2552 2564 # Reduce fullmapping into a direct relation between original nodes
2553 2565 # and the final nodes created during history editing.
2554 2566 # Dropped changesets are replaced by an empty list.
2555 2567 toproceed = set(fullmapping)
2556 2568 final = {}
2557 2569 while toproceed:
2558 2570 for x in list(toproceed):
2559 2571 succs = fullmapping[x]
2560 2572 for s in list(succs):
2561 2573 if s in toproceed:
2562 2574 # non-final node with unknown closure
2563 2575 # we can't process this now
2564 2576 break
2565 2577 elif s in final:
2566 2578 # non final node, replace with closure
2567 2579 succs.remove(s)
2568 2580 succs.update(final[s])
2569 2581 else:
2570 2582 final[x] = succs
2571 2583 toproceed.remove(x)
2572 2584 # remove tmpnodes from final mapping
2573 2585 for n in tmpnodes:
2574 2586 del final[n]
2575 2587 # we expect all changes involved in final to exist in the repo
2576 2588 # turn `final` into list (topologically sorted)
2577 2589 get_rev = state.repo.changelog.index.get_rev
2578 2590 for prec, succs in final.items():
2579 2591 final[prec] = sorted(succs, key=get_rev)
2580 2592
2581 2593 # compute the topmost element (necessary for bookmarks)
2582 2594 if new:
2583 2595 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
2584 2596 elif not final:
2585 2597 # Nothing was rewritten at all; we won't need `newtopmost`.
2586 2598 # It is the same as `oldtopmost`, and callers of `processreplacement` know it.
2587 2599 newtopmost = None
2588 2600 else:
2589 2601 # everybody died; the newtopmost is the parent of the root.
2590 2602 r = state.repo.changelog.rev
2591 2603 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
2592 2604
2593 2605 return final, tmpnodes, new, newtopmost
2594 2606
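# Worked example for the reduction above (hypothetical nodes a, t, b, c):
# with replacements [(a, {t}), (t, {b, c})], fullmapping becomes
# {a: {t}, t: {b, c}}, allsuccs = {t, b, c} and replaced = {a, t}, so
# tmpnodes = {t} and new = {b, c}. The loop first finalizes t -> {b, c},
# then rewrites a's successors through t, and deleting the temporary
# node leaves final = {a: [b, c]} (sorted by revision).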
2595 2607
2596 2608 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
2597 2609 """Move bookmark from oldtopmost to newly created topmost
2598 2610
2599 2611 This is arguably a feature and we may only want that for the active
2600 2612 bookmark. But the behavior is kept compatible with the old version for now.
2601 2613 """
2602 2614 if not oldtopmost or not newtopmost:
2603 2615 return
2604 2616 oldbmarks = repo.nodebookmarks(oldtopmost)
2605 2617 if oldbmarks:
2606 2618 with repo.lock(), repo.transaction(b'histedit') as tr:
2607 2619 marks = repo._bookmarks
2608 2620 changes = []
2609 2621 for name in oldbmarks:
2610 2622 changes.append((name, newtopmost))
2611 2623 marks.applychanges(repo, tr, changes)
2612 2624
2613 2625
2614 2626 def cleanupnode(ui, repo, nodes, nobackup=False):
2615 2627 """strip a group of nodes from the repository
2616 2628
2617 2629 The set of nodes to strip may contain unknown nodes."""
2618 2630 with repo.lock():
2619 2631 # do not let filtering get in the way of the cleanse
2620 2632 # we should probably get rid of obsolescence markers created during
2621 2633 # the histedit, but we currently do not have such information.
2622 2634 repo = repo.unfiltered()
2623 2635 # Find all nodes that need to be stripped
2624 2636 # (we use %lr instead of %ln to silently ignore unknown items)
2625 2637 has_node = repo.changelog.index.has_node
2626 2638 nodes = sorted(n for n in nodes if has_node(n))
2627 2639 roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
2628 2640 if roots:
2629 2641 backup = not nobackup
2630 2642 repair.strip(ui, repo, roots, backup=backup)
2631 2643
2632 2644
2633 2645 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
2634 2646 if isinstance(nodelist, bytes):
2635 2647 nodelist = [nodelist]
2636 2648 state = histeditstate(repo)
2637 2649 if state.inprogress():
2638 2650 state.read()
2639 2651 histedit_nodes = {
2640 2652 action.node for action in state.actions if action.node
2641 2653 }
2642 2654 common_nodes = histedit_nodes & set(nodelist)
2643 2655 if common_nodes:
2644 2656 raise error.Abort(
2645 2657 _(b"histedit in progress, can't strip %s")
2646 2658 % b', '.join(short(x) for x in common_nodes)
2647 2659 )
2648 2660 return orig(ui, repo, nodelist, *args, **kwargs)
2649 2661
2650 2662
2651 2663 extensions.wrapfunction(repair, b'strip', stripwrapper)
2652 2664
2653 2665
2654 2666 def summaryhook(ui, repo):
2655 2667 state = histeditstate(repo)
2656 2668 if not state.inprogress():
2657 2669 return
2658 2670 state.read()
2659 2671 if state.actions:
2660 2672 # i18n: column positioning for "hg summary"
2661 2673 ui.write(
2662 2674 _(b'hist: %s (histedit --continue)\n')
2663 2675 % (
2664 2676 ui.label(_(b'%d remaining'), b'histedit.remaining')
2665 2677 % len(state.actions)
2666 2678 )
2667 2679 )
2668 2680
2669 2681
2670 2682 def extsetup(ui):
2671 2683 cmdutil.summaryhooks.add(b'histedit', summaryhook)
2672 2684 statemod.addunfinished(
2673 2685 b'histedit',
2674 2686 fname=b'histedit-state',
2675 2687 allowcommit=True,
2676 2688 continueflag=True,
2677 2689 abortfunc=hgaborthistedit,
2678 2690 )
@@ -1,889 +1,896 @@
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a Distributed SCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an
15 15 # audience not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Files to act upon/ignore are specified in the [keyword] section.
25 25 # Customized keyword template mappings in the [keywordmaps] section.
26 26 #
27 27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28 28
29 29 '''expand keywords in tracked files
30 30
31 31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 32 tracked text files selected by your configuration.
33 33
34 34 Keywords are only expanded in local repositories and not stored in the
35 35 change history. The mechanism can be regarded as a convenience for the
36 36 current user or for archive distribution.
37 37
38 38 Keywords expand to the changeset data pertaining to the latest change
39 39 relative to the working directory parent of each file.
40 40
41 41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 42 sections of hgrc files.
43 43
44 44 Example::
45 45
46 46 [keyword]
47 47 # expand keywords in every python file except those matching "x*"
48 48 **.py =
49 49 x* = ignore
50 50
51 51 [keywordset]
52 52 # prefer svn- over cvs-like default keywordmaps
53 53 svn = True
54 54
55 55 .. note::
56 56
57 57 The more specific your filename patterns are, the less speed
58 58 you lose in huge repositories.
59 59
60 60 For [keywordmaps] template mapping and expansion demonstration and
61 61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 62 available templates and filters.
63 63
64 64 Three additional date template filters are provided:
65 65
66 66 :``utcdate``: "2006/09/18 15:13:13"
67 67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 68 :``svnisodate``: "2006-09-18 08:13:13 -0700 (Mon, 18 Sep 2006)"
69 69
70 70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 71 replaced with customized keywords and templates. Again, run
72 72 :hg:`kwdemo` to control the results of your configuration changes.
73 73
74 74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 75 to avoid storing expanded keywords in the change history.
76 76
77 77 To force expansion after enabling it, or a configuration change, run
78 78 :hg:`kwexpand`.
79 79
80 80 Expansions spanning more than one line and incremental expansions,
81 81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 82 {desc}" expands to the first line of the changeset description.
83 83 '''
84 84
85 85
86 86 from __future__ import absolute_import
87 87
88 88 import os
89 89 import re
90 90 import weakref
91 91
92 92 from mercurial.i18n import _
93 93 from mercurial.pycompat import getattr
94 94 from mercurial.hgweb import webcommands
95 95
96 96 from mercurial import (
97 97 cmdutil,
98 98 context,
99 99 dispatch,
100 100 error,
101 101 extensions,
102 102 filelog,
103 103 localrepo,
104 104 logcmdutil,
105 105 match,
106 106 patch,
107 107 pathutil,
108 108 pycompat,
109 109 registrar,
110 110 scmutil,
111 111 templatefilters,
112 112 templateutil,
113 113 util,
114 114 )
115 115 from mercurial.utils import (
116 116 dateutil,
117 117 stringutil,
118 118 )
119 from mercurial.dirstateutils import timestamp
119 120
120 121 cmdtable = {}
121 122 command = registrar.command(cmdtable)
122 123 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
123 124 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
124 125 # be specifying the version(s) of Mercurial they are tested with, or
125 126 # leave the attribute unspecified.
126 127 testedwith = b'ships-with-hg-core'
127 128
128 129 # hg commands that do not act on keywords
129 130 nokwcommands = (
130 131 b'add addremove annotate bundle export grep incoming init log'
131 132 b' outgoing push tip verify convert email glog'
132 133 )
133 134
134 135 # webcommands that do not act on keywords
135 136 nokwwebcommands = b'annotate changeset rev filediff diff comparison'
136 137
137 138 # hg commands that trigger expansion only when writing to working dir,
138 139 # not when reading filelog, and unexpand when reading from working dir
139 140 restricted = (
140 141 b'merge kwexpand kwshrink record qrecord resolve transplant'
141 142 b' unshelve rebase graft backout histedit fetch'
142 143 )
143 144
144 145 # names of extensions using dorecord
145 146 recordextensions = b'record'
146 147
147 148 colortable = {
148 149 b'kwfiles.enabled': b'green bold',
149 150 b'kwfiles.deleted': b'cyan bold underline',
150 151 b'kwfiles.enabledunknown': b'green',
151 152 b'kwfiles.ignored': b'bold',
152 153 b'kwfiles.ignoredunknown': b'none',
153 154 }
154 155
155 156 templatefilter = registrar.templatefilter()
156 157
157 158 configtable = {}
158 159 configitem = registrar.configitem(configtable)
159 160
160 161 configitem(
161 162 b'keywordset',
162 163 b'svn',
163 164 default=False,
164 165 )
165 166 # date like in cvs' $Date
166 167 @templatefilter(b'utcdate', intype=templateutil.date)
167 168 def utcdate(date):
168 169 """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
169 170 dateformat = b'%Y/%m/%d %H:%M:%S'
170 171 return dateutil.datestr((date[0], 0), dateformat)
171 172
172 173
173 174 # date like in svn's $Date
174 175 @templatefilter(b'svnisodate', intype=templateutil.date)
175 176 def svnisodate(date):
176 177 """Date. Returns a date in this format: "2009-08-18 13:00:13
177 178 +0200 (Tue, 18 Aug 2009)".
178 179 """
179 180 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
180 181
181 182
182 183 # date like in svn's $Id
183 184 @templatefilter(b'svnutcdate', intype=templateutil.date)
184 185 def svnutcdate(date):
185 186 """Date. Returns a UTC-date in this format: "2009-08-18
186 187 11:00:13Z".
187 188 """
188 189 dateformat = b'%Y-%m-%d %H:%M:%SZ'
189 190 return dateutil.datestr((date[0], 0), dateformat)
190 191
191 192
192 193 # make keyword tools accessible
193 194 kwtools = {b'hgcmd': b''}
194 195
195 196
196 197 def _defaultkwmaps(ui):
197 198 '''Returns default keywordmaps according to keywordset configuration.'''
198 199 templates = {
199 200 b'Revision': b'{node|short}',
200 201 b'Author': b'{author|user}',
201 202 }
202 203 kwsets = (
203 204 {
204 205 b'Date': b'{date|utcdate}',
205 206 b'RCSfile': b'{file|basename},v',
206 207 b'RCSFile': b'{file|basename},v', # kept for backwards compatibility
207 208 # with hg-keyword
208 209 b'Source': b'{root}/{file},v',
209 210 b'Id': b'{file|basename},v {node|short} {date|utcdate} {author|user}',
210 211 b'Header': b'{root}/{file},v {node|short} {date|utcdate} {author|user}',
211 212 },
212 213 {
213 214 b'Date': b'{date|svnisodate}',
214 215 b'Id': b'{file|basename},v {node|short} {date|svnutcdate} {author|user}',
215 216 b'LastChangedRevision': b'{node|short}',
216 217 b'LastChangedBy': b'{author|user}',
217 218 b'LastChangedDate': b'{date|svnisodate}',
218 219 },
219 220 )
220 221 templates.update(kwsets[ui.configbool(b'keywordset', b'svn')])
221 222 return templates
222 223
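# Illustrative expansion under the default cvs-like keywordset (file
# name, node and user are hypothetical): a tracked file containing
# "$Id$" would, once committed, be expanded to
# "$Id: demo.txt,v 9a3d2c1b4e5f 2006/09/18 15:13:13 alice $".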
223 224
224 225 def _shrinktext(text, subfunc):
225 226 """Helper for keyword expansion removal in text.
226 227 Depending on subfunc, also returns the number of substitutions."""
227 228 return subfunc(br'$\1$', text)
228 229
229 230
230 231 def _preselect(wstatus, changed):
231 232 """Retrieves modified and added files from a working directory state
232 233 and returns the subset of each contained in given changed files
233 234 retrieved from a change context."""
234 235 modified = [f for f in wstatus.modified if f in changed]
235 236 added = [f for f in wstatus.added if f in changed]
236 237 return modified, added
237 238
238 239
239 240 class kwtemplater(object):
240 241 """
241 242 Sets up keyword templates, corresponding keyword regex, and
242 243 provides keyword substitution functions.
243 244 """
244 245
245 246 def __init__(self, ui, repo, inc, exc):
246 247 self.ui = ui
247 248 self._repo = weakref.ref(repo)
248 249 self.match = match.match(repo.root, b'', [], inc, exc)
249 250 self.restrict = kwtools[b'hgcmd'] in restricted.split()
250 251 self.postcommit = False
251 252
252 253 kwmaps = self.ui.configitems(b'keywordmaps')
253 254 if kwmaps: # override default templates
254 255 self.templates = dict(kwmaps)
255 256 else:
256 257 self.templates = _defaultkwmaps(self.ui)
257 258
258 259 @property
259 260 def repo(self):
260 261 return self._repo()
261 262
262 263 @util.propertycache
263 264 def escape(self):
264 265 '''Returns bar-separated and escaped keywords.'''
265 266 return b'|'.join(map(stringutil.reescape, self.templates.keys()))
266 267
267 268 @util.propertycache
268 269 def rekw(self):
269 270 '''Returns regex for unexpanded keywords.'''
270 271 return re.compile(br'\$(%s)\$' % self.escape)
271 272
272 273 @util.propertycache
273 274 def rekwexp(self):
274 275 '''Returns regex for expanded keywords.'''
275 276 return re.compile(br'\$(%s): [^$\n\r]*? \$' % self.escape)
276 277
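# Quick contrast of the two patterns above: rekw matches unexpanded
# keywords such as b'$Id$', while rekwexp matches expanded ones such as
# b'$Id: demo.txt,v 9a3d2c1b4e5f ... $' (values illustrative), which
# _shrinktext() collapses back to b'$Id$'.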
277 278 def substitute(self, data, path, ctx, subfunc):
278 279 '''Replaces keywords in data with expanded template.'''
279 280
280 281 def kwsub(mobj):
281 282 kw = mobj.group(1)
282 283 ct = logcmdutil.maketemplater(
283 284 self.ui, self.repo, self.templates[kw]
284 285 )
285 286 self.ui.pushbuffer()
286 287 ct.show(ctx, root=self.repo.root, file=path)
287 288 ekw = templatefilters.firstline(self.ui.popbuffer())
288 289 return b'$%s: %s $' % (kw, ekw)
289 290
290 291 return subfunc(kwsub, data)
291 292
292 293 def linkctx(self, path, fileid):
293 294 '''Similar to filelog.linkrev, but returns a changectx.'''
294 295 return self.repo.filectx(path, fileid=fileid).changectx()
295 296
296 297 def expand(self, path, node, data):
297 298 '''Returns data with keywords expanded.'''
298 299 if (
299 300 not self.restrict
300 301 and self.match(path)
301 302 and not stringutil.binary(data)
302 303 ):
303 304 ctx = self.linkctx(path, node)
304 305 return self.substitute(data, path, ctx, self.rekw.sub)
305 306 return data
306 307
307 308 def iskwfile(self, cand, ctx):
308 309 """Returns subset of candidates which are configured for keyword
309 310 expansion but are not symbolic links."""
310 311 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
311 312
312 313 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
313 314 '''Overwrites selected files expanding/shrinking keywords.'''
314 315 if self.restrict or lookup or self.postcommit: # exclude kw_copy
315 316 candidates = self.iskwfile(candidates, ctx)
316 317 if not candidates:
317 318 return
318 319 kwcmd = self.restrict and lookup # kwexpand/kwshrink
319 320 if self.restrict or expand and lookup:
320 321 mf = ctx.manifest()
321 322 if self.restrict or rekw:
322 323 re_kw = self.rekw
323 324 else:
324 325 re_kw = self.rekwexp
325 326 if expand:
326 327 msg = _(b'overwriting %s expanding keywords\n')
327 328 else:
328 329 msg = _(b'overwriting %s shrinking keywords\n')
330 wctx = self.repo[None]
329 331 for f in candidates:
330 332 if self.restrict:
331 333 data = self.repo.file(f).read(mf[f])
332 334 else:
333 335 data = self.repo.wread(f)
334 336 if stringutil.binary(data):
335 337 continue
336 338 if expand:
337 339 parents = ctx.parents()
338 340 if lookup:
339 341 ctx = self.linkctx(f, mf[f])
340 342 elif self.restrict and len(parents) > 1:
341 343 # merge commit
342 344 # in case of conflict f is in modified state during
343 345 # merge, even if f does not differ from f in parent
344 346 for p in parents:
345 347 if f in p and not p[f].cmp(ctx[f]):
346 348 ctx = p[f].changectx()
347 349 break
348 350 data, found = self.substitute(data, f, ctx, re_kw.subn)
349 351 elif self.restrict:
350 352 found = re_kw.search(data)
351 353 else:
352 354 data, found = _shrinktext(data, re_kw.subn)
353 355 if found:
354 356 self.ui.note(msg % f)
355 357 fp = self.repo.wvfs(f, b"wb", atomictemp=True)
356 358 fp.write(data)
357 359 fp.close()
358 360 if kwcmd:
359 self.repo.dirstate.set_clean(f)
361 s = wctx[f].lstat()
362 mode = s.st_mode
363 size = s.st_size
364 mtime = timestamp.mtime_of(s)
365 cache_data = (mode, size, mtime)
366 self.repo.dirstate.set_clean(f, cache_data)
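# the (mode, size, mtime) tuple mirrors what status would
# record for a clean file, letting the dirstate cache the
# post-overwrite stat data instead of re-reading the file
# on the next status run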
360 367 elif self.postcommit:
361 368 self.repo.dirstate.update_file_p1(f, p1_tracked=True)
362 369
363 370 def shrink(self, fname, text):
364 371 '''Returns text with all keyword substitutions removed.'''
365 372 if self.match(fname) and not stringutil.binary(text):
366 373 return _shrinktext(text, self.rekwexp.sub)
367 374 return text
368 375
369 376 def shrinklines(self, fname, lines):
370 377 '''Returns lines with keyword substitutions removed.'''
371 378 if self.match(fname):
372 379 text = b''.join(lines)
373 380 if not stringutil.binary(text):
374 381 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
375 382 return lines
376 383
377 384 def wread(self, fname, data):
378 385 """If in restricted mode returns data read from wdir with
379 386 keyword substitutions removed."""
380 387 if self.restrict:
381 388 return self.shrink(fname, data)
382 389 return data
383 390
384 391
385 392 class kwfilelog(filelog.filelog):
386 393 """
387 394 Subclass of filelog to hook into its read, add, cmp methods.
388 395 Keywords are "stored" unexpanded, and processed on reading.
389 396 """
390 397
391 398 def __init__(self, opener, kwt, path):
392 399 super(kwfilelog, self).__init__(opener, path)
393 400 self.kwt = kwt
394 401 self.path = path
395 402
396 403 def read(self, node):
397 404 '''Expands keywords when reading filelog.'''
398 405 data = super(kwfilelog, self).read(node)
399 406 if self.renamed(node):
400 407 return data
401 408 return self.kwt.expand(self.path, node, data)
402 409
403 410 def add(self, text, meta, tr, link, p1=None, p2=None):
404 411 '''Removes keyword substitutions when adding to filelog.'''
405 412 text = self.kwt.shrink(self.path, text)
406 413 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
407 414
408 415 def cmp(self, node, text):
409 416 '''Removes keyword substitutions for comparison.'''
410 417 text = self.kwt.shrink(self.path, text)
411 418 return super(kwfilelog, self).cmp(node, text)
412 419
413 420
414 421 def _status(ui, repo, wctx, kwt, *pats, **opts):
415 422 """Bails out if [keyword] configuration is not active.
416 423 Returns status of working directory."""
417 424 if kwt:
418 425 opts = pycompat.byteskwargs(opts)
419 426 return repo.status(
420 427 match=scmutil.match(wctx, pats, opts),
421 428 clean=True,
422 429 unknown=opts.get(b'unknown') or opts.get(b'all'),
423 430 )
424 431 if ui.configitems(b'keyword'):
425 432 raise error.Abort(_(b'[keyword] patterns cannot match'))
426 433 raise error.Abort(_(b'no [keyword] patterns configured'))
427 434
428 435
429 436 def _kwfwrite(ui, repo, expand, *pats, **opts):
430 437 '''Selects files and passes them to kwtemplater.overwrite.'''
431 438 wctx = repo[None]
432 439 if len(wctx.parents()) > 1:
433 440 raise error.Abort(_(b'outstanding uncommitted merge'))
434 441 kwt = getattr(repo, '_keywordkwt', None)
435 442 with repo.wlock():
436 443 status = _status(ui, repo, wctx, kwt, *pats, **opts)
437 444 if status.modified or status.added or status.removed or status.deleted:
438 445 raise error.Abort(_(b'outstanding uncommitted changes'))
439 446 kwt.overwrite(wctx, status.clean, True, expand)
440 447
441 448
442 449 @command(
443 450 b'kwdemo',
444 451 [
445 452 (b'd', b'default', None, _(b'show default keyword template maps')),
446 453 (b'f', b'rcfile', b'', _(b'read maps from rcfile'), _(b'FILE')),
447 454 ],
448 455 _(b'hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
449 456 optionalrepo=True,
450 457 )
451 458 def demo(ui, repo, *args, **opts):
452 459 """print [keywordmaps] configuration and an expansion example
453 460
454 461 Show current, custom, or default keyword template maps and their
455 462 expansions.
456 463
457 464 Extend the current configuration by specifying maps as arguments
458 465 and using -f/--rcfile to source an external hgrc file.
459 466
460 467 Use -d/--default to disable current configuration.
461 468
462 469 See :hg:`help templates` for information on templates and filters.
463 470 """
464 471
465 472 def demoitems(section, items):
466 473 ui.write(b'[%s]\n' % section)
467 474 for k, v in sorted(items):
468 475 if isinstance(v, bool):
469 476 v = stringutil.pprint(v)
470 477 ui.write(b'%s = %s\n' % (k, v))
471 478
472 479 fn = b'demo.txt'
473 480 tmpdir = pycompat.mkdtemp(b'', b'kwdemo.')
474 481 ui.note(_(b'creating temporary repository at %s\n') % tmpdir)
475 482 if repo is None:
476 483 baseui = ui
477 484 else:
478 485 baseui = repo.baseui
479 486 repo = localrepo.instance(baseui, tmpdir, create=True)
480 487 ui.setconfig(b'keyword', fn, b'', b'keyword')
481 488 svn = ui.configbool(b'keywordset', b'svn')
482 489 # explicitly set keywordset for demo output
483 490 ui.setconfig(b'keywordset', b'svn', svn, b'keyword')
484 491
485 492 uikwmaps = ui.configitems(b'keywordmaps')
486 493 if args or opts.get('rcfile'):
487 494 ui.status(_(b'\n\tconfiguration using custom keyword template maps\n'))
488 495 if uikwmaps:
489 496 ui.status(_(b'\textending current template maps\n'))
490 497 if opts.get('default') or not uikwmaps:
491 498 if svn:
492 499 ui.status(_(b'\toverriding default svn keywordset\n'))
493 500 else:
494 501 ui.status(_(b'\toverriding default cvs keywordset\n'))
495 502 if opts.get('rcfile'):
496 503 ui.readconfig(opts.get(b'rcfile'))
497 504 if args:
498 505 # simulate hgrc parsing
499 506 rcmaps = b'[keywordmaps]\n%s\n' % b'\n'.join(args)
500 507 repo.vfs.write(b'hgrc', rcmaps)
501 508 ui.readconfig(repo.vfs.join(b'hgrc'))
502 509 kwmaps = dict(ui.configitems(b'keywordmaps'))
503 510 elif opts.get('default'):
504 511 if svn:
505 512 ui.status(_(b'\n\tconfiguration using default svn keywordset\n'))
506 513 else:
507 514 ui.status(_(b'\n\tconfiguration using default cvs keywordset\n'))
508 515 kwmaps = _defaultkwmaps(ui)
509 516 if uikwmaps:
510 517 ui.status(_(b'\tdisabling current template maps\n'))
511 518 for k, v in pycompat.iteritems(kwmaps):
512 519 ui.setconfig(b'keywordmaps', k, v, b'keyword')
513 520 else:
514 521 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
515 522 if uikwmaps:
516 523 kwmaps = dict(uikwmaps)
517 524 else:
518 525 kwmaps = _defaultkwmaps(ui)
519 526
520 527 uisetup(ui)
521 528 reposetup(ui, repo)
522 529 ui.writenoi18n(b'[extensions]\nkeyword =\n')
523 530 demoitems(b'keyword', ui.configitems(b'keyword'))
524 531 demoitems(b'keywordset', ui.configitems(b'keywordset'))
525 532 demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
526 533 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
527 534 repo.wvfs.write(fn, keywords)
528 535 repo[None].add([fn])
529 536 ui.note(_(b'\nkeywords written to %s:\n') % fn)
530 537 ui.note(keywords)
531 538 with repo.wlock():
532 539 repo.dirstate.setbranch(b'demobranch')
533 540 for name, cmd in ui.configitems(b'hooks'):
534 541 if name.split(b'.', 1)[0].find(b'commit') > -1:
535 542 repo.ui.setconfig(b'hooks', name, b'', b'keyword')
536 543 msg = _(b'hg keyword configuration and expansion example')
537 544 ui.note((b"hg ci -m '%s'\n" % msg))
538 545 repo.commit(text=msg)
539 546 ui.status(_(b'\n\tkeywords expanded\n'))
540 547 ui.write(repo.wread(fn))
541 548 repo.wvfs.rmtree(repo.root)
542 549
543 550
544 551 @command(
545 552 b'kwexpand',
546 553 cmdutil.walkopts,
547 554 _(b'hg kwexpand [OPTION]... [FILE]...'),
548 555 inferrepo=True,
549 556 )
550 557 def expand(ui, repo, *pats, **opts):
551 558 """expand keywords in the working directory
552 559
553 560 Run after (re)enabling keyword expansion.
554 561
555 562 kwexpand refuses to run if given files contain local changes.
556 563 """
557 564 # 3rd argument sets expansion to True
558 565 _kwfwrite(ui, repo, True, *pats, **opts)
559 566
560 567
561 568 @command(
562 569 b'kwfiles',
563 570 [
564 571 (b'A', b'all', None, _(b'show keyword status flags of all files')),
565 572 (b'i', b'ignore', None, _(b'show files excluded from expansion')),
566 573 (b'u', b'unknown', None, _(b'only show unknown (not tracked) files')),
567 574 ]
568 575 + cmdutil.walkopts,
569 576 _(b'hg kwfiles [OPTION]... [FILE]...'),
570 577 inferrepo=True,
571 578 )
572 579 def files(ui, repo, *pats, **opts):
573 580 """show files configured for keyword expansion
574 581
575 582 List which files in the working directory are matched by the
576 583 [keyword] configuration patterns.
577 584
578 585 Useful to prevent inadvertent keyword expansion and to speed up
579 586 execution by including only files that are actual candidates for
580 587 expansion.
581 588
582 589 See :hg:`help keyword` on how to construct patterns both for
583 590 inclusion and exclusion of files.
584 591
585 592 With -A/--all and -v/--verbose the codes used to show the status
586 593 of files are::
587 594
588 595 K = keyword expansion candidate
589 596 k = keyword expansion candidate (not tracked)
590 597 I = ignored
591 598 i = ignored (not tracked)
592 599 """
593 600 kwt = getattr(repo, '_keywordkwt', None)
594 601 wctx = repo[None]
595 602 status = _status(ui, repo, wctx, kwt, *pats, **opts)
596 603 if pats:
597 604 cwd = repo.getcwd()
598 605 else:
599 606 cwd = b''
600 607 files = []
601 608 opts = pycompat.byteskwargs(opts)
602 609 if not opts.get(b'unknown') or opts.get(b'all'):
603 610 files = sorted(status.modified + status.added + status.clean)
604 611 kwfiles = kwt.iskwfile(files, wctx)
605 612 kwdeleted = kwt.iskwfile(status.deleted, wctx)
606 613 kwunknown = kwt.iskwfile(status.unknown, wctx)
607 614 if not opts.get(b'ignore') or opts.get(b'all'):
608 615 showfiles = kwfiles, kwdeleted, kwunknown
609 616 else:
610 617 showfiles = [], [], []
611 618 if opts.get(b'all') or opts.get(b'ignore'):
612 619 showfiles += (
613 620 [f for f in files if f not in kwfiles],
614 621 [f for f in status.unknown if f not in kwunknown],
615 622 )
616 623 kwlabels = b'enabled deleted enabledunknown ignored ignoredunknown'.split()
617 624 kwstates = zip(kwlabels, pycompat.bytestr(b'K!kIi'), showfiles)
618 625 fm = ui.formatter(b'kwfiles', opts)
619 626 fmt = b'%.0s%s\n'
620 627 if opts.get(b'all') or ui.verbose:
621 628 fmt = b'%s %s\n'
622 629 for kwstate, char, filenames in kwstates:
623 630 label = b'kwfiles.' + kwstate
624 631 for f in filenames:
625 632 fm.startitem()
626 633 fm.data(kwstatus=char, path=f)
627 634 fm.plain(fmt % (char, repo.pathto(f, cwd)), label=label)
628 635 fm.end()
629 636
630 637
631 638 @command(
632 639 b'kwshrink',
633 640 cmdutil.walkopts,
634 641 _(b'hg kwshrink [OPTION]... [FILE]...'),
635 642 inferrepo=True,
636 643 )
637 644 def shrink(ui, repo, *pats, **opts):
638 645 """revert expanded keywords in the working directory
639 646
640 647 Must be run before changing/disabling active keywords.
641 648
642 649 kwshrink refuses to run if given files contain local changes.
643 650 """
644 651 # 3rd argument sets expansion to False
645 652 _kwfwrite(ui, repo, False, *pats, **opts)
646 653
647 654
648 655 # monkeypatches
649 656
650 657
651 658 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
652 659 """Monkeypatch/wrap patch.patchfile.__init__ to avoid
653 660 rejects or conflicts due to expanded keywords in working dir."""
654 661 orig(self, ui, gp, backend, store, eolmode)
655 662 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
656 663 if kwt:
657 664 # shrink keywords read from working dir
658 665 self.lines = kwt.shrinklines(self.fname, self.lines)
659 666
660 667
661 668 def kwdiff(orig, repo, *args, **kwargs):
662 669 '''Monkeypatch patch.diff to avoid expansion.'''
663 670 kwt = getattr(repo, '_keywordkwt', None)
664 671 if kwt:
665 672 restrict = kwt.restrict
666 673 kwt.restrict = True
667 674 try:
668 675 for chunk in orig(repo, *args, **kwargs):
669 676 yield chunk
670 677 finally:
671 678 if kwt:
672 679 kwt.restrict = restrict
673 680
674 681
675 682 def kwweb_skip(orig, web):
676 683 '''Wraps webcommands.x turning off keyword expansion.'''
677 684 kwt = getattr(web.repo, '_keywordkwt', None)
678 685 if kwt:
679 686 origmatch = kwt.match
680 687 kwt.match = util.never
681 688 try:
682 689 for chunk in orig(web):
683 690 yield chunk
684 691 finally:
685 692 if kwt:
686 693 kwt.match = origmatch
687 694
688 695
689 696 def kw_amend(orig, ui, repo, old, extra, pats, opts):
690 697 '''Wraps cmdutil.amend expanding keywords after amend.'''
691 698 kwt = getattr(repo, '_keywordkwt', None)
692 699 if kwt is None:
693 700 return orig(ui, repo, old, extra, pats, opts)
694 701 with repo.wlock(), repo.dirstate.parentchange():
695 702 kwt.postcommit = True
696 703 newid = orig(ui, repo, old, extra, pats, opts)
697 704 if newid != old.node():
698 705 ctx = repo[newid]
699 706 kwt.restrict = True
700 707 kwt.overwrite(ctx, ctx.files(), False, True)
701 708 kwt.restrict = False
702 709 return newid
703 710
704 711
705 712 def kw_copy(orig, ui, repo, pats, opts, rename=False):
706 713 """Wraps cmdutil.copy so that copy/rename destinations do not
707 714 contain expanded keywords.
708 715 Note that the source of a regular file destination may also be a
709 716 symlink:
710 717 hg cp sym x -> x is symlink
711 718 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
712 719 For the latter we have to follow the symlink to find out whether its
713 720 target is configured for expansion and we therefore must unexpand the
714 721 keywords in the destination."""
715 722 kwt = getattr(repo, '_keywordkwt', None)
716 723 if kwt is None:
717 724 return orig(ui, repo, pats, opts, rename)
718 725 with repo.wlock():
719 726 orig(ui, repo, pats, opts, rename)
720 727 if opts.get(b'dry_run'):
721 728 return
722 729 wctx = repo[None]
723 730 cwd = repo.getcwd()
724 731
725 732 def haskwsource(dest):
726 733 """Returns true if dest is a regular file and configured for
727 734 expansion or a symlink which points to a file configured for
728 735 expansion."""
729 736 source = repo.dirstate.copied(dest)
730 737 if b'l' in wctx.flags(source):
731 738 source = pathutil.canonpath(
732 739 repo.root, cwd, os.path.realpath(source)
733 740 )
734 741 return kwt.match(source)
735 742
736 743 candidates = [
737 744 f
738 745 for f in repo.dirstate.copies()
739 746 if b'l' not in wctx.flags(f) and haskwsource(f)
740 747 ]
741 748 kwt.overwrite(wctx, candidates, False, False)
742 749
743 750
744 751 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
745 752 '''Wraps record.dorecord expanding keywords after recording.'''
746 753 kwt = getattr(repo, '_keywordkwt', None)
747 754 if kwt is None:
748 755 return orig(ui, repo, commitfunc, *pats, **opts)
749 756 with repo.wlock():
750 757 # record returns 0 even when nothing has changed
751 758 # therefore compare nodes before and after
752 759 kwt.postcommit = True
753 760 ctx = repo[b'.']
754 761 wstatus = ctx.status()
755 762 ret = orig(ui, repo, commitfunc, *pats, **opts)
756 763 recctx = repo[b'.']
757 764 if ctx != recctx:
758 765 modified, added = _preselect(wstatus, recctx.files())
759 766 kwt.restrict = False
760 767 with repo.dirstate.parentchange():
761 768 kwt.overwrite(recctx, modified, False, True)
762 769 kwt.overwrite(recctx, added, False, True, True)
763 770 kwt.restrict = True
764 771 return ret
765 772
766 773
767 774 def kwfilectx_cmp(orig, self, fctx):
768 775 if fctx._customcmp:
769 776 return fctx.cmp(self)
770 777 kwt = getattr(self._repo, '_keywordkwt', None)
771 778 if kwt is None:
772 779 return orig(self, fctx)
773 780 # keywords affect data size, so comparing wdir and filelog sizes
774 781 # does not make sense
775 782 if (
776 783 fctx._filenode is None
777 784 and (
778 785 self._repo._encodefilterpats
779 786 or kwt.match(fctx.path())
780 787 and b'l' not in fctx.flags()
781 788 or self.size() - 4 == fctx.size()
782 789 )
783 790 or self.size() == fctx.size()
784 791 ):
785 792 return self._filelog.cmp(self._filenode, fctx.data())
786 793 return True
787 794
788 795
789 796 def uisetup(ui):
790 797 """Monkeypatches dispatch._parse to retrieve user command.
791 798 Overrides file method to return kwfilelog instead of filelog
792 799 if file matches user configuration.
793 800 Wraps commit to overwrite configured files with updated
794 801 keyword substitutions.
795 802 Monkeypatches patch and webcommands."""
796 803
797 804 def kwdispatch_parse(orig, ui, args):
798 805 '''Monkeypatch dispatch._parse to obtain running hg command.'''
799 806 cmd, func, args, options, cmdoptions = orig(ui, args)
800 807 kwtools[b'hgcmd'] = cmd
801 808 return cmd, func, args, options, cmdoptions
802 809
803 810 extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)
804 811
805 812 extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp)
806 813 extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init)
807 814 extensions.wrapfunction(patch, b'diff', kwdiff)
808 815 extensions.wrapfunction(cmdutil, b'amend', kw_amend)
809 816 extensions.wrapfunction(cmdutil, b'copy', kw_copy)
810 817 extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord)
811 818 for c in nokwwebcommands.split():
812 819 extensions.wrapfunction(webcommands, c, kwweb_skip)
813 820
814 821
815 822 def reposetup(ui, repo):
816 823 '''Sets up repo as kwrepo for keyword substitution.'''
817 824
818 825 try:
819 826 if (
820 827 not repo.local()
821 828 or kwtools[b'hgcmd'] in nokwcommands.split()
822 829 or b'.hg' in util.splitpath(repo.root)
823 830 or repo._url.startswith(b'bundle:')
824 831 ):
825 832 return
826 833 except AttributeError:
827 834 pass
828 835
829 836 inc, exc = [], [b'.hg*']
830 837 for pat, opt in ui.configitems(b'keyword'):
831 838 if opt != b'ignore':
832 839 inc.append(pat)
833 840 else:
834 841 exc.append(pat)
835 842 if not inc:
836 843 return
837 844
838 845 kwt = kwtemplater(ui, repo, inc, exc)
839 846
840 847 class kwrepo(repo.__class__):
841 848 def file(self, f):
842 849 if f[0] == b'/':
843 850 f = f[1:]
844 851 return kwfilelog(self.svfs, kwt, f)
845 852
846 853 def wread(self, filename):
847 854 data = super(kwrepo, self).wread(filename)
848 855 return kwt.wread(filename, data)
849 856
850 857 def commit(self, *args, **opts):
851 858 # use custom commitctx for user commands
852 859 # other extensions can still wrap repo.commitctx directly
853 860 self.commitctx = self.kwcommitctx
854 861 try:
855 862 return super(kwrepo, self).commit(*args, **opts)
856 863 finally:
857 864 del self.commitctx
858 865
859 866 def kwcommitctx(self, ctx, error=False, origctx=None):
860 867 n = super(kwrepo, self).commitctx(ctx, error, origctx)
861 868 # no lock needed, only called from repo.commit() which already locks
862 869 if not kwt.postcommit:
863 870 restrict = kwt.restrict
864 871 kwt.restrict = True
865 872 kwt.overwrite(
866 873 self[n], sorted(ctx.added() + ctx.modified()), False, True
867 874 )
868 875 kwt.restrict = restrict
869 876 return n
870 877
871 878 def rollback(self, dryrun=False, force=False):
872 879 with self.wlock():
873 880 origrestrict = kwt.restrict
874 881 try:
875 882 if not dryrun:
876 883 changed = self[b'.'].files()
877 884 ret = super(kwrepo, self).rollback(dryrun, force)
878 885 if not dryrun:
879 886 ctx = self[b'.']
880 887 modified, added = _preselect(ctx.status(), changed)
881 888 kwt.restrict = False
882 889 kwt.overwrite(ctx, modified, True, True)
883 890 kwt.overwrite(ctx, added, True, False)
884 891 return ret
885 892 finally:
886 893 kwt.restrict = origrestrict
887 894
888 895 repo.__class__ = kwrepo
889 896 repo._keywordkwt = kwt
@@ -1,790 +1,798 @@
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''largefiles utility code: must not import other modules in this package.'''
10 10 from __future__ import absolute_import
11 11
12 12 import contextlib
13 13 import copy
14 14 import os
15 15 import stat
16 16
17 17 from mercurial.i18n import _
18 18 from mercurial.node import hex
19 19 from mercurial.pycompat import open
20 20
21 21 from mercurial import (
22 22 dirstate,
23 23 encoding,
24 24 error,
25 25 httpconnection,
26 26 match as matchmod,
27 27 pycompat,
28 28 requirements,
29 29 scmutil,
30 30 sparse,
31 31 util,
32 32 vfs as vfsmod,
33 33 )
34 34 from mercurial.utils import hashutil
35 from mercurial.dirstateutils import timestamp
35 36
36 37 shortname = b'.hglf'
37 38 shortnameslash = shortname + b'/'
38 39 longname = b'largefiles'
39 40
40 41 # -- Private worker functions ------------------------------------------
41 42
42 43
43 44 @contextlib.contextmanager
44 45 def lfstatus(repo, value=True):
45 46 oldvalue = getattr(repo, 'lfstatus', False)
46 47 repo.lfstatus = value
47 48 try:
48 49 yield
49 50 finally:
50 51 repo.lfstatus = oldvalue
51 52
52 53
53 54 def getminsize(ui, assumelfiles, opt, default=10):
54 55 lfsize = opt
55 56 if not lfsize and assumelfiles:
56 57 lfsize = ui.config(longname, b'minsize', default=default)
57 58 if lfsize:
58 59 try:
59 60 lfsize = float(lfsize)
60 61 except ValueError:
61 62 raise error.Abort(
62 63 _(b'largefiles: size must be number (not %s)\n') % lfsize
63 64 )
64 65 if lfsize is None:
65 66 raise error.Abort(_(b'minimum size for largefiles must be specified'))
66 67 return lfsize
67 68
68 69
69 70 def link(src, dest):
70 71 """Try to create hardlink - if that fails, efficiently make a copy."""
71 72 util.makedirs(os.path.dirname(dest))
72 73 try:
73 74 util.oslink(src, dest)
74 75 except OSError:
75 76 # if hardlinks fail, fallback on atomic copy
76 77 with open(src, b'rb') as srcf, util.atomictempfile(dest) as dstf:
77 78 for chunk in util.filechunkiter(srcf):
78 79 dstf.write(chunk)
79 80 os.chmod(dest, os.stat(src).st_mode)
80 81
81 82
82 83 def usercachepath(ui, hash):
83 84 """Return the correct location in the "global" largefiles cache for a file
84 85 with the given hash.
85 86 This cache is used for sharing of largefiles across repositories - both
86 87 to preserve download bandwidth and storage space."""
87 88 return os.path.join(_usercachedir(ui), hash)
88 89
89 90
90 91 def _usercachedir(ui, name=longname):
91 92 '''Return the location of the "global" largefiles cache.'''
92 93 path = ui.configpath(name, b'usercache')
93 94 if path:
94 95 return path
95 96
96 97 hint = None
97 98
98 99 if pycompat.iswindows:
99 100 appdata = encoding.environ.get(
100 101 b'LOCALAPPDATA', encoding.environ.get(b'APPDATA')
101 102 )
102 103 if appdata:
103 104 return os.path.join(appdata, name)
104 105
105 106 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
106 107 b"LOCALAPPDATA",
107 108 b"APPDATA",
108 109 name,
109 110 )
110 111 elif pycompat.isdarwin:
111 112 home = encoding.environ.get(b'HOME')
112 113 if home:
113 114 return os.path.join(home, b'Library', b'Caches', name)
114 115
115 116 hint = _(b"define %s in the environment, or set %s.usercache") % (
116 117 b"HOME",
117 118 name,
118 119 )
119 120 elif pycompat.isposix:
120 121 path = encoding.environ.get(b'XDG_CACHE_HOME')
121 122 if path:
122 123 return os.path.join(path, name)
123 124 home = encoding.environ.get(b'HOME')
124 125 if home:
125 126 return os.path.join(home, b'.cache', name)
126 127
127 128 hint = _(b"define %s or %s in the environment, or set %s.usercache") % (
128 129 b"XDG_CACHE_HOME",
129 130 b"HOME",
130 131 name,
131 132 )
132 133 else:
133 134 raise error.Abort(
134 135 _(b'unknown operating system: %s\n') % pycompat.osname
135 136 )
136 137
137 138 raise error.Abort(_(b'unknown %s usercache location') % name, hint=hint)
138 139
139 140
140 141 def inusercache(ui, hash):
141 142 path = usercachepath(ui, hash)
142 143 return os.path.exists(path)
143 144
144 145
145 146 def findfile(repo, hash):
146 147 """Return store path of the largefile with the specified hash.
147 148 As a side effect, the file might be linked from user cache.
148 149 Return None if the file can't be found locally."""
149 150 path, exists = findstorepath(repo, hash)
150 151 if exists:
151 152 repo.ui.note(_(b'found %s in store\n') % hash)
152 153 return path
153 154 elif inusercache(repo.ui, hash):
154 155 repo.ui.note(_(b'found %s in system cache\n') % hash)
155 156 path = storepath(repo, hash)
156 157 link(usercachepath(repo.ui, hash), path)
157 158 return path
158 159 return None
159 160
160 161
161 162 class largefilesdirstate(dirstate.dirstate):
162 163 def __getitem__(self, key):
163 164 return super(largefilesdirstate, self).__getitem__(unixpath(key))
164 165
165 166 def set_tracked(self, f):
166 167 return super(largefilesdirstate, self).set_tracked(unixpath(f))
167 168
168 169 def set_untracked(self, f):
169 170 return super(largefilesdirstate, self).set_untracked(unixpath(f))
170 171
171 172 def normal(self, f, parentfiledata=None):
172 173 # not sure if we should pass the `parentfiledata` down or throw it
173 174 # away, so we throw it away to stay on the safe side
174 175 return super(largefilesdirstate, self).normal(unixpath(f))
175 176
176 177 def remove(self, f):
177 178 return super(largefilesdirstate, self).remove(unixpath(f))
178 179
179 180 def add(self, f):
180 181 return super(largefilesdirstate, self).add(unixpath(f))
181 182
182 183 def drop(self, f):
183 184 return super(largefilesdirstate, self).drop(unixpath(f))
184 185
185 186 def forget(self, f):
186 187 return super(largefilesdirstate, self).forget(unixpath(f))
187 188
188 189 def normallookup(self, f):
189 190 return super(largefilesdirstate, self).normallookup(unixpath(f))
190 191
191 192 def _ignore(self, f):
192 193 return False
193 194
194 195 def write(self, tr):
195 196 # (1) disable PENDING mode always
196 197 # (lfdirstate isn't yet managed as a part of the transaction)
197 198 # (2) avoid develwarn 'use dirstate.write with ....'
198 199 if tr:
199 200 tr.addbackup(b'largefiles/dirstate', location=b'plain')
200 201 super(largefilesdirstate, self).write(None)
201 202
202 203
203 204 def openlfdirstate(ui, repo, create=True):
204 205 """
205 206 Return a dirstate object that tracks largefiles: i.e. its root is
206 207 the repo root, but it is saved in .hg/largefiles/dirstate.
207 208 """
208 209 vfs = repo.vfs
209 210 lfstoredir = longname
210 211 opener = vfsmod.vfs(vfs.join(lfstoredir))
211 212 use_dirstate_v2 = requirements.DIRSTATE_V2_REQUIREMENT in repo.requirements
212 213 lfdirstate = largefilesdirstate(
213 214 opener,
214 215 ui,
215 216 repo.root,
216 217 repo.dirstate._validate,
217 218 lambda: sparse.matcher(repo),
218 219 repo.nodeconstants,
219 220 use_dirstate_v2,
220 221 )
221 222
222 223 # If the largefiles dirstate does not exist, populate and create
223 224 # it. This ensures that we create it on the first meaningful
224 225 # largefiles operation in a new clone.
225 226 if create and not vfs.exists(vfs.join(lfstoredir, b'dirstate')):
226 227 matcher = getstandinmatcher(repo)
227 228 standins = repo.dirstate.walk(
228 229 matcher, subrepos=[], unknown=False, ignored=False
229 230 )
230 231
231 232 if len(standins) > 0:
232 233 vfs.makedirs(lfstoredir)
233 234
234 235 with lfdirstate.parentchange():
235 236 for standin in standins:
236 237 lfile = splitstandin(standin)
237 238 lfdirstate.update_file(
238 239 lfile, p1_tracked=True, wc_tracked=True, possibly_dirty=True
239 240 )
240 241 return lfdirstate
241 242
242 243
243 244 def lfdirstatestatus(lfdirstate, repo):
244 245 pctx = repo[b'.']
245 246 match = matchmod.always()
246 unsure, s = lfdirstate.status(
247 unsure, s, mtime_boundary = lfdirstate.status(
247 248 match, subrepos=[], ignored=False, clean=False, unknown=False
248 249 )
249 250 modified, clean = s.modified, s.clean
251 wctx = repo[None]
250 252 for lfile in unsure:
251 253 try:
252 254 fctx = pctx[standin(lfile)]
253 255 except LookupError:
254 256 fctx = None
255 257 if not fctx or readasstandin(fctx) != hashfile(repo.wjoin(lfile)):
256 258 modified.append(lfile)
257 259 else:
258 260 clean.append(lfile)
259 lfdirstate.set_clean(lfile)
261 st = wctx[lfile].lstat()
262 mode = st.st_mode
263 size = st.st_size
264 mtime = timestamp.reliable_mtime_of(st, mtime_boundary)
265 if mtime is not None:
266 cache_data = (mode, size, mtime)
267 lfdirstate.set_clean(lfile, cache_data)
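# reliable_mtime_of() is expected to return None when the file's
# mtime is too close to the status boundary to be trusted; in that
# case the entry is left uncached rather than risking a stale
# "clean" record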
260 268 return s
261 269
262 270
263 271 def listlfiles(repo, rev=None, matcher=None):
264 272 """return a list of largefiles in the working copy or the
265 273 specified changeset"""
266 274
267 275 if matcher is None:
268 276 matcher = getstandinmatcher(repo)
269 277
270 278 # ignore unknown files in working directory
271 279 return [
272 280 splitstandin(f)
273 281 for f in repo[rev].walk(matcher)
274 282 if rev is not None or repo.dirstate.get_entry(f).any_tracked
275 283 ]
276 284
277 285
278 286 def instore(repo, hash, forcelocal=False):
279 287 '''Return true if a largefile with the given hash exists in the store'''
280 288 return os.path.exists(storepath(repo, hash, forcelocal))
281 289
282 290
283 291 def storepath(repo, hash, forcelocal=False):
284 292 """Return the correct location in the repository largefiles store for a
285 293 file with the given hash."""
286 294 if not forcelocal and repo.shared():
287 295 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
288 296 return repo.vfs.join(longname, hash)
289 297
290 298
291 299 def findstorepath(repo, hash):
292 300 """Search through the local store path(s) to find the file for the given
293 301 hash. If the file is not found, its path in the primary store is returned.
294 302 The return value is a tuple of (path, exists(path)).
295 303 """
296 304 # For shared repos, the primary store is in the share source. But for
297 305 # backward compatibility, force a lookup in the local store if it wasn't
298 306 # found in the share source.
299 307 path = storepath(repo, hash, False)
300 308
301 309 if instore(repo, hash):
302 310 return (path, True)
303 311 elif repo.shared() and instore(repo, hash, True):
304 312 return storepath(repo, hash, True), True
305 313
306 314 return (path, False)
307 315
308 316
309 317 def copyfromcache(repo, hash, filename):
310 318 """Copy the specified largefile from the repo or system cache to
311 319 filename in the repository. Return true on success or false if the
312 320 file was not found in either cache (which should not happen:
313 321 this is meant to be called only after ensuring that the needed
314 322 largefile exists in the cache)."""
315 323 wvfs = repo.wvfs
316 324 path = findfile(repo, hash)
317 325 if path is None:
318 326 return False
319 327 wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
320 328 # The write may fail before the file is fully written, but we
321 329 # don't use atomic writes in the working copy.
322 330 with open(path, b'rb') as srcfd, wvfs(filename, b'wb') as destfd:
323 331 gothash = copyandhash(util.filechunkiter(srcfd), destfd)
324 332 if gothash != hash:
325 333 repo.ui.warn(
326 334 _(b'%s: data corruption in %s with hash %s\n')
327 335 % (filename, path, gothash)
328 336 )
329 337 wvfs.unlink(filename)
330 338 return False
331 339 return True
332 340
333 341
334 342 def copytostore(repo, ctx, file, fstandin):
335 343 wvfs = repo.wvfs
336 344 hash = readasstandin(ctx[fstandin])
337 345 if instore(repo, hash):
338 346 return
339 347 if wvfs.exists(file):
340 348 copytostoreabsolute(repo, wvfs.join(file), hash)
341 349 else:
342 350 repo.ui.warn(
343 351 _(b"%s: largefile %s not available from local store\n")
344 352 % (file, hash)
345 353 )
346 354
347 355
348 356 def copyalltostore(repo, node):
349 357 '''Copy all largefiles in a given revision to the store'''
350 358
351 359 ctx = repo[node]
352 360 for filename in ctx.files():
353 361 realfile = splitstandin(filename)
354 362 if realfile is not None and filename in ctx.manifest():
355 363 copytostore(repo, ctx, realfile, filename)
356 364
357 365
358 366 def copytostoreabsolute(repo, file, hash):
359 367 if inusercache(repo.ui, hash):
360 368 link(usercachepath(repo.ui, hash), storepath(repo, hash))
361 369 else:
362 370 util.makedirs(os.path.dirname(storepath(repo, hash)))
363 371 with open(file, b'rb') as srcf:
364 372 with util.atomictempfile(
365 373 storepath(repo, hash), createmode=repo.store.createmode
366 374 ) as dstf:
367 375 for chunk in util.filechunkiter(srcf):
368 376 dstf.write(chunk)
369 377 linktousercache(repo, hash)
370 378
371 379
372 380 def linktousercache(repo, hash):
373 381 """Link / copy the largefile with the specified hash from the store
374 382 to the cache."""
375 383 path = usercachepath(repo.ui, hash)
376 384 link(storepath(repo, hash), path)
377 385
378 386
379 387 def getstandinmatcher(repo, rmatcher=None):
380 388 '''Return a match object that applies rmatcher to the standin directory'''
381 389 wvfs = repo.wvfs
382 390 standindir = shortname
383 391
384 392 # no warnings about missing files or directories
385 393 badfn = lambda f, msg: None
386 394
387 395 if rmatcher and not rmatcher.always():
388 396 pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
389 397 if not pats:
390 398 pats = [wvfs.join(standindir)]
391 399 match = scmutil.match(repo[None], pats, badfn=badfn)
392 400 else:
393 401 # no patterns: relative to repo root
394 402 match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
395 403 return match
396 404
397 405
398 406 def composestandinmatcher(repo, rmatcher):
399 407 """Return a matcher that accepts standins corresponding to the
400 408 files accepted by rmatcher. Pass the list of files in the matcher
401 409 as the paths specified by the user."""
402 410 smatcher = getstandinmatcher(repo, rmatcher)
403 411 isstandin = smatcher.matchfn
404 412
405 413 def composedmatchfn(f):
406 414 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
407 415
408 416 smatcher.matchfn = composedmatchfn
409 417
410 418 return smatcher
411 419
412 420
413 421 def standin(filename):
414 422 """Return the repo-relative path to the standin for the specified big
415 423 file."""
416 424 # Notes:
417 425 # 1) Some callers want an absolute path, but for instance addlargefiles
418 426 # needs it repo-relative so it can be passed to repo[None].add(). So
419 427 # leave it up to the caller to use repo.wjoin() to get an absolute path.
420 428 # 2) Join with '/' because that's what dirstate always uses, even on
421 429 # Windows. Change existing separator to '/' first in case we are
422 430 # passed filenames from an external source (like the command line).
423 431 return shortnameslash + util.pconvert(filename)
424 432
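# Example (for orientation): standin(b'sub\\big.bin') on Windows and
# standin(b'sub/big.bin') on POSIX both yield b'.hglf/sub/big.bin',
# matching the slash-separated form the dirstate uses.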
425 433
426 434 def isstandin(filename):
427 435 """Return true if filename is a big file standin. filename must be
428 436 in Mercurial's internal form (slash-separated)."""
429 437 return filename.startswith(shortnameslash)
430 438
431 439
432 440 def splitstandin(filename):
433 441 # Split on / because that's what dirstate always uses, even on Windows.
434 442 # Change local separator to / first just in case we are passed filenames
435 443 # from an external source (like the command line).
436 444 bits = util.pconvert(filename).split(b'/', 1)
437 445 if len(bits) == 2 and bits[0] == shortname:
438 446 return bits[1]
439 447 else:
440 448 return None
441 449
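# Illustrative round trip (editorial sketch, assuming the module-level
# shortname == b'.hglf' defined earlier in this file):
#
#   standin(b'foo/bar')             -> b'.hglf/foo/bar'
#   isstandin(b'.hglf/foo/bar')     -> True
#   splitstandin(b'.hglf/foo/bar')  -> b'foo/bar'
#   splitstandin(b'foo/bar')        -> None (not a standin)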
442 450
443 451 def updatestandin(repo, lfile, standin):
444 452 """Re-calculate hash value of lfile and write it into standin
445 453
446 454 This assumes that "lfutil.standin(lfile) == standin", for efficiency.
447 455 """
448 456 file = repo.wjoin(lfile)
449 457 if repo.wvfs.exists(lfile):
450 458 hash = hashfile(file)
451 459 executable = getexecutable(file)
452 460 writestandin(repo, standin, hash, executable)
453 461 else:
454 462 raise error.Abort(_(b'%s: file not found!') % lfile)
455 463
456 464
457 465 def readasstandin(fctx):
458 466 """read hex hash from given filectx of standin file
459 467
460 468 This encapsulates how "standin" data is stored into storage layer."""
461 469 return fctx.data().strip()
462 470
463 471
464 472 def writestandin(repo, standin, hash, executable):
465 473 '''write hash to <repo.root>/<standin>'''
466 474 repo.wwrite(standin, hash + b'\n', executable and b'x' or b'')
467 475
468 476
469 477 def copyandhash(instream, outfile):
470 478 """Read bytes from instream (iterable) and write them to outfile,
471 479 computing the SHA-1 hash of the data along the way. Return the hash."""
472 480 hasher = hashutil.sha1(b'')
473 481 for data in instream:
474 482 hasher.update(data)
475 483 outfile.write(data)
476 484 return hex(hasher.digest())
477 485
478 486
479 487 def hashfile(file):
480 488 if not os.path.exists(file):
481 489 return b''
482 490 with open(file, b'rb') as fd:
483 491 return hexsha1(fd)
484 492
485 493
486 494 def getexecutable(filename):
487 495 mode = os.stat(filename).st_mode
488 496 return (
489 497 (mode & stat.S_IXUSR)
490 498 and (mode & stat.S_IXGRP)
491 499 and (mode & stat.S_IXOTH)
492 500 )
493 501
494 502
495 503 def urljoin(first, second, *arg):
496 504 def join(left, right):
497 505 if not left.endswith(b'/'):
498 506 left += b'/'
499 507 if right.startswith(b'/'):
500 508 right = right[1:]
501 509 return left + right
502 510
503 511 url = join(first, second)
504 512 for a in arg:
505 513 url = join(url, a)
506 514 return url
507 515
508 516
509 517 def hexsha1(fileobj):
510 518 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
511 519 object data"""
512 520 h = hashutil.sha1()
513 521 for chunk in util.filechunkiter(fileobj):
514 522 h.update(chunk)
515 523 return hex(h.digest())
516 524
517 525
518 526 def httpsendfile(ui, filename):
519 527 return httpconnection.httpsendfile(ui, filename, b'rb')
520 528
521 529
522 530 def unixpath(path):
523 531 '''Return a version of path normalized for use with the lfdirstate.'''
524 532 return util.pconvert(os.path.normpath(path))
525 533
526 534
527 535 def islfilesrepo(repo):
528 536 '''Return true if the repo is a largefile repo.'''
529 537 if b'largefiles' in repo.requirements and any(
530 538 shortnameslash in f[1] for f in repo.store.datafiles()
531 539 ):
532 540 return True
533 541
534 542 return any(openlfdirstate(repo.ui, repo, False))
535 543
536 544
537 545 class storeprotonotcapable(Exception):
538 546 def __init__(self, storetypes):
539 547 self.storetypes = storetypes
540 548
541 549
542 550 def getstandinsstate(repo):
543 551 standins = []
544 552 matcher = getstandinmatcher(repo)
545 553 wctx = repo[None]
546 554 for standin in repo.dirstate.walk(
547 555 matcher, subrepos=[], unknown=False, ignored=False
548 556 ):
549 557 lfile = splitstandin(standin)
550 558 try:
551 559 hash = readasstandin(wctx[standin])
552 560 except IOError:
553 561 hash = None
554 562 standins.append((lfile, hash))
555 563 return standins
556 564
557 565
558 566 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
559 567 lfstandin = standin(lfile)
560 568 if lfstandin not in repo.dirstate:
561 569 lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=False)
562 570 else:
563 571 entry = repo.dirstate.get_entry(lfstandin)
564 572 lfdirstate.update_file(
565 573 lfile,
566 574 wc_tracked=entry.tracked,
567 575 p1_tracked=entry.p1_tracked,
568 576 p2_info=entry.p2_info,
569 577 possibly_dirty=True,
570 578 )
571 579
572 580
573 581 def markcommitted(orig, ctx, node):
574 582 repo = ctx.repo()
575 583
576 584 lfdirstate = openlfdirstate(repo.ui, repo)
577 585 with lfdirstate.parentchange():
578 586 orig(node)
579 587
580 588 # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
581 589 # because files coming from the 2nd parent are omitted in the latter.
582 590 #
583 591 # The former should be used to get targets of "synclfdirstate",
584 592 # because such files:
585 593 # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
586 594 # - have to be marked as "n" after commit, but
587 595 # - aren't listed in "repo[node].files()"
588 596
589 597 for f in ctx.files():
590 598 lfile = splitstandin(f)
591 599 if lfile is not None:
592 600 synclfdirstate(repo, lfdirstate, lfile, False)
593 601 lfdirstate.write(repo.currenttransaction())
594 602
595 603 # As part of committing, copy all of the largefiles into the cache.
596 604 #
597 605 # Using "node" instead of "ctx" implies an additional "repo[node]"
598 606 # lookup inside copyalltostore(), but lets us omit a redundant check
599 607 # for files coming from the 2nd parent, which should already be in
600 608 # the store after a merge.
601 609 copyalltostore(repo, node)
602 610
603 611
604 612 def getlfilestoupdate(oldstandins, newstandins):
605 613 changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
606 614 filelist = []
607 615 for f in changedstandins:
608 616 if f[0] not in filelist:
609 617 filelist.append(f[0])
610 618 return filelist
611 619
612 620
613 621 def getlfilestoupload(repo, missing, addfunc):
614 622 makeprogress = repo.ui.makeprogress
615 623 with makeprogress(
616 624 _(b'finding outgoing largefiles'),
617 625 unit=_(b'revisions'),
618 626 total=len(missing),
619 627 ) as progress:
620 628 for i, n in enumerate(missing):
621 629 progress.update(i)
622 630 parents = [p for p in repo[n].parents() if p != repo.nullid]
623 631
624 632 with lfstatus(repo, value=False):
625 633 ctx = repo[n]
626 634
627 635 files = set(ctx.files())
628 636 if len(parents) == 2:
629 637 mc = ctx.manifest()
630 638 mp1 = ctx.p1().manifest()
631 639 mp2 = ctx.p2().manifest()
632 640 for f in mp1:
633 641 if f not in mc:
634 642 files.add(f)
635 643 for f in mp2:
636 644 if f not in mc:
637 645 files.add(f)
638 646 for f in mc:
639 647 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
640 648 files.add(f)
641 649 for fn in files:
642 650 if isstandin(fn) and fn in ctx:
643 651 addfunc(fn, readasstandin(ctx[fn]))
644 652
645 653
646 654 def updatestandinsbymatch(repo, match):
647 655 """Update standins in the working directory according to specified match
648 656
649 657 This returns (possibly modified) ``match`` object to be used for
650 658 subsequent commit process.
651 659 """
652 660
653 661 ui = repo.ui
654 662
655 663 # Case 1: user calls commit with no specific files or
656 664 # include/exclude patterns: refresh and commit all files that
657 665 # are "dirty".
658 666 if match is None or match.always():
659 667 # Spend a bit of time here getting a list of files we know are
660 668 # modified, so that we can compare only against those.
661 669 # Otherwise, updating all standins can cost a lot
662 670 # of time (several seconds) if the largefiles
663 671 # are large.
664 672 lfdirstate = openlfdirstate(ui, repo)
665 673 dirtymatch = matchmod.always()
666 unsure, s = lfdirstate.status(
674 unsure, s, mtime_boundary = lfdirstate.status(
667 675 dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False
668 676 )
669 677 modifiedfiles = unsure + s.modified + s.added + s.removed
670 678 lfiles = listlfiles(repo)
671 679 # this only loops through largefiles that exist (not
672 680 # removed/renamed)
673 681 for lfile in lfiles:
674 682 if lfile in modifiedfiles:
675 683 fstandin = standin(lfile)
676 684 if repo.wvfs.exists(fstandin):
677 685 # this handles the case where a rebase is being
678 686 # performed and the working copy is not updated
679 687 # yet.
680 688 if repo.wvfs.exists(lfile):
681 689 updatestandin(repo, lfile, fstandin)
682 690
683 691 return match
684 692
685 693 lfiles = listlfiles(repo)
686 694 match._files = repo._subdirlfs(match.files(), lfiles)
687 695
688 696 # Case 2: user calls commit with specified patterns: refresh
689 697 # any matching big files.
690 698 smatcher = composestandinmatcher(repo, match)
691 699 standins = repo.dirstate.walk(
692 700 smatcher, subrepos=[], unknown=False, ignored=False
693 701 )
694 702
695 703 # No matching big files: get out of the way and pass control to
696 704 # the usual commit() method.
697 705 if not standins:
698 706 return match
699 707
700 708 # Refresh all matching big files. It's possible that the
701 709 # commit will end up failing, in which case the big files will
702 710 # stay refreshed. No harm done: the user modified them and
703 711 # asked to commit them, so sooner or later we're going to
704 712 # refresh the standins. Might as well leave them refreshed.
705 713 lfdirstate = openlfdirstate(ui, repo)
706 714 for fstandin in standins:
707 715 lfile = splitstandin(fstandin)
708 716 if lfdirstate.get_entry(lfile).tracked:
709 717 updatestandin(repo, lfile, fstandin)
710 718
711 719 # Cook up a new matcher that only matches regular files or
712 720 # standins corresponding to the big files requested by the
713 721 # user. Have to modify _files to prevent commit() from
714 722 # complaining "not tracked" for big files.
715 723 match = copy.copy(match)
716 724 origmatchfn = match.matchfn
717 725
718 726 # Check both the list of largefiles and the list of
719 727 # standins because if a largefile was removed, it
720 728 # won't be in the list of largefiles at this point
721 729 match._files += sorted(standins)
722 730
723 731 actualfiles = []
724 732 for f in match._files:
725 733 fstandin = standin(f)
726 734
727 735 # For largefiles, only one of the normal and standin should be
728 736 # committed (except if one of them is a remove). In the case of a
729 737 # standin removal, drop the normal file if it is unknown to dirstate.
730 738 # Thus, skip plain largefile names but keep the standin.
731 739 if f in lfiles or fstandin in standins:
732 740 if not repo.dirstate.get_entry(fstandin).removed:
733 741 if not repo.dirstate.get_entry(f).removed:
734 742 continue
735 743 elif not repo.dirstate.get_entry(f).any_tracked:
736 744 continue
737 745
738 746 actualfiles.append(f)
739 747 match._files = actualfiles
740 748
741 749 def matchfn(f):
742 750 if origmatchfn(f):
743 751 return f not in lfiles
744 752 else:
745 753 return f in standins
746 754
747 755 match.matchfn = matchfn
748 756
749 757 return match
750 758
751 759
752 760 class automatedcommithook(object):
753 761 """Stateful hook to update standins at the 1st commit of resuming
754 762
755 763 For efficiency, updating standins in the working directory should
756 764 be avoided while automated committing (like rebase, transplant and
757 765 so on), because they should be updated before committing.
758 766
759 767 But the 1st commit of resuming automated committing (e.g. ``rebase
760 768 --continue``) should update them, because largefiles may be
761 769 modified manually.
762 770 """
763 771
764 772 def __init__(self, resuming):
765 773 self.resuming = resuming
766 774
767 775 def __call__(self, repo, match):
768 776 if self.resuming:
769 777 self.resuming = False # avoids updating at subsequent commits
770 778 return updatestandinsbymatch(repo, match)
771 779 else:
772 780 return match
773 781
774 782
775 783 def getstatuswriter(ui, repo, forcibly=None):
776 784 """Return the function to write largefiles specific status out
777 785
778 786 If ``forcibly`` is ``None``, this returns the last element of
779 787 ``repo._lfstatuswriters`` as "default" writer function.
780 788
781 789 Otherwise, this returns the function to always write out (or
782 790 ignore if ``not forcibly``) status.
783 791 """
784 792 if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
785 793 return repo._lfstatuswriters[-1]
786 794 else:
787 795 if forcibly:
788 796 return ui.status # forcibly WRITE OUT
789 797 else:
790 798 return lambda *msg, **opts: None # forcibly IGNORE
@@ -1,1857 +1,1866 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13 import os
14 14
15 15 from mercurial.i18n import _
16 16
17 17 from mercurial.pycompat import open
18 18
19 19 from mercurial.hgweb import webcommands
20 20
21 21 from mercurial import (
22 22 archival,
23 23 cmdutil,
24 24 copies as copiesmod,
25 25 error,
26 26 exchange,
27 27 extensions,
28 28 exthelper,
29 29 filemerge,
30 30 hg,
31 31 logcmdutil,
32 32 match as matchmod,
33 33 merge,
34 34 mergestate as mergestatemod,
35 35 pathutil,
36 36 pycompat,
37 37 scmutil,
38 38 smartset,
39 39 subrepo,
40 40 url as urlmod,
41 41 util,
42 42 )
43 43
44 44 from mercurial.upgrade_utils import (
45 45 actions as upgrade_actions,
46 46 )
47 47
48 48 from . import (
49 49 lfcommands,
50 50 lfutil,
51 51 storefactory,
52 52 )
53 53
54 ACTION_ADD = mergestatemod.ACTION_ADD
55 ACTION_DELETED_CHANGED = mergestatemod.ACTION_DELETED_CHANGED
56 ACTION_GET = mergestatemod.ACTION_GET
57 ACTION_KEEP = mergestatemod.ACTION_KEEP
58 ACTION_REMOVE = mergestatemod.ACTION_REMOVE
59
54 60 eh = exthelper.exthelper()
55 61
56 62 lfstatus = lfutil.lfstatus
57 63
58 MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'lfmr'
64 MERGE_ACTION_LARGEFILE_MARK_REMOVED = mergestatemod.MergeAction('lfmr')
59 65
60 66 # -- Utility functions: commonly/repeatedly needed functionality ---------------
61 67
62 68
63 69 def composelargefilematcher(match, manifest):
64 70 """create a matcher that matches only the largefiles in the original
65 71 matcher"""
66 72 m = copy.copy(match)
67 73 lfile = lambda f: lfutil.standin(f) in manifest
68 74 m._files = [lf for lf in m._files if lfile(lf)]
69 75 m._fileset = set(m._files)
70 76 m.always = lambda: False
71 77 origmatchfn = m.matchfn
72 78 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
73 79 return m
74 80
75 81
76 82 def composenormalfilematcher(match, manifest, exclude=None):
77 83 excluded = set()
78 84 if exclude is not None:
79 85 excluded.update(exclude)
80 86
81 87 m = copy.copy(match)
82 88 notlfile = lambda f: not (
83 89 lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
84 90 )
85 91 m._files = [lf for lf in m._files if notlfile(lf)]
86 92 m._fileset = set(m._files)
87 93 m.always = lambda: False
88 94 origmatchfn = m.matchfn
89 95 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
90 96 return m
91 97
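# Illustrative note (editorial): the two helpers above partition a single
# user-supplied matcher into disjoint halves, so wrapped commands such as
# cmdutiladd()/cmdutilremove() below can run the normal-file code path and
# the largefile code path separately without matching a file twice.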
92 98
93 99 def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
94 100 large = opts.get('large')
95 101 lfsize = lfutil.getminsize(
96 102 ui, lfutil.islfilesrepo(repo), opts.get('lfsize')
97 103 )
98 104
99 105 lfmatcher = None
100 106 if lfutil.islfilesrepo(repo):
101 107 lfpats = ui.configlist(lfutil.longname, b'patterns')
102 108 if lfpats:
103 109 lfmatcher = matchmod.match(repo.root, b'', list(lfpats))
104 110
105 111 lfnames = []
106 112 m = matcher
107 113
108 114 wctx = repo[None]
109 115 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
110 116 exact = m.exact(f)
111 117 lfile = lfutil.standin(f) in wctx
112 118 nfile = f in wctx
113 119 exists = lfile or nfile
114 120
115 121 # Don't warn the user when they attempt to add a normal tracked file.
116 122 # The normal add code will do that for us.
117 123 if exact and exists:
118 124 if lfile:
119 125 ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
120 126 continue
121 127
122 128 if (exact or not exists) and not lfutil.isstandin(f):
123 129 # In case the file was removed previously, but not committed
124 130 # (issue3507)
125 131 if not repo.wvfs.exists(f):
126 132 continue
127 133
128 134 abovemin = (
129 135 lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
130 136 )
131 137 if large or abovemin or (lfmatcher and lfmatcher(f)):
132 138 lfnames.append(f)
133 139 if ui.verbose or not exact:
134 140 ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))
135 141
136 142 bad = []
137 143
138 144 # Need to lock, otherwise there could be a race condition between
139 145 # when standins are created and added to the repo.
140 146 with repo.wlock():
141 147 if not opts.get('dry_run'):
142 148 standins = []
143 149 lfdirstate = lfutil.openlfdirstate(ui, repo)
144 150 for f in lfnames:
145 151 standinname = lfutil.standin(f)
146 152 lfutil.writestandin(
147 153 repo,
148 154 standinname,
149 155 hash=b'',
150 156 executable=lfutil.getexecutable(repo.wjoin(f)),
151 157 )
152 158 standins.append(standinname)
153 159 lfdirstate.set_tracked(f)
154 160 lfdirstate.write(repo.currenttransaction())
155 161 bad += [
156 162 lfutil.splitstandin(f)
157 163 for f in repo[None].add(standins)
158 164 if f in m.files()
159 165 ]
160 166
161 167 added = [f for f in lfnames if f not in bad]
162 168 return added, bad
163 169
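# Illustrative example (editorial): with the default lfsize of 10 (see the
# --lfsize option below), a file is picked up as a largefile when
# lstat().st_size >= 10 * 1024 * 1024 bytes, when it matches a pattern in
# the largefiles 'patterns' config, or when --large is passed explicitly.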
164 170
165 171 def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
166 172 after = opts.get('after')
167 173 m = composelargefilematcher(matcher, repo[None].manifest())
168 174 with lfstatus(repo):
169 175 s = repo.status(match=m, clean=not isaddremove)
170 176 manifest = repo[None].manifest()
171 177 modified, added, deleted, clean = [
172 178 [f for f in list if lfutil.standin(f) in manifest]
173 179 for list in (s.modified, s.added, s.deleted, s.clean)
174 180 ]
175 181
176 182 def warn(files, msg):
177 183 for f in files:
178 184 ui.warn(msg % uipathfn(f))
179 185 return int(len(files) > 0)
180 186
181 187 if after:
182 188 remove = deleted
183 189 result = warn(
184 190 modified + added + clean, _(b'not removing %s: file still exists\n')
185 191 )
186 192 else:
187 193 remove = deleted + clean
188 194 result = warn(
189 195 modified,
190 196 _(
191 197 b'not removing %s: file is modified (use -f'
192 198 b' to force removal)\n'
193 199 ),
194 200 )
195 201 result = (
196 202 warn(
197 203 added,
198 204 _(
199 205 b'not removing %s: file has been marked for add'
200 206 b' (use forget to undo)\n'
201 207 ),
202 208 )
203 209 or result
204 210 )
205 211
206 212 # Need to lock because standin files are deleted then removed from the
207 213 # repository and we could race in-between.
208 214 with repo.wlock():
209 215 lfdirstate = lfutil.openlfdirstate(ui, repo)
210 216 for f in sorted(remove):
211 217 if ui.verbose or not m.exact(f):
212 218 ui.status(_(b'removing %s\n') % uipathfn(f))
213 219
214 220 if not dryrun:
215 221 if not after:
216 222 repo.wvfs.unlinkpath(f, ignoremissing=True)
217 223
218 224 if dryrun:
219 225 return result
220 226
221 227 remove = [lfutil.standin(f) for f in remove]
222 228 # If this is being called by addremove, let the original addremove
223 229 # function handle this.
224 230 if not isaddremove:
225 231 for f in remove:
226 232 repo.wvfs.unlinkpath(f, ignoremissing=True)
227 233 repo[None].forget(remove)
228 234
229 235 for f in remove:
230 236 lfdirstate.set_untracked(lfutil.splitstandin(f))
231 237
232 238 lfdirstate.write(repo.currenttransaction())
233 239
234 240 return result
235 241
236 242
237 243 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 244 # appear at their right place in the manifests.
239 245 @eh.wrapfunction(webcommands, b'decodepath')
240 246 def decodepath(orig, path):
241 247 return lfutil.splitstandin(path) or path
242 248
243 249
244 250 # -- Wrappers: modify existing commands --------------------------------
245 251
246 252
247 253 @eh.wrapcommand(
248 254 b'add',
249 255 opts=[
250 256 (b'', b'large', None, _(b'add as largefile')),
251 257 (b'', b'normal', None, _(b'add as normal file')),
252 258 (
253 259 b'',
254 260 b'lfsize',
255 261 b'',
256 262 _(
257 263 b'add all files above this size (in megabytes) '
258 264 b'as largefiles (default: 10)'
259 265 ),
260 266 ),
261 267 ],
262 268 )
263 269 def overrideadd(orig, ui, repo, *pats, **opts):
264 270 if opts.get('normal') and opts.get('large'):
265 271 raise error.Abort(_(b'--normal cannot be used with --large'))
266 272 return orig(ui, repo, *pats, **opts)
267 273
268 274
269 275 @eh.wrapfunction(cmdutil, b'add')
270 276 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
271 277 # The --normal flag short circuits this override
272 278 if opts.get('normal'):
273 279 return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
274 280
275 281 ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
276 282 normalmatcher = composenormalfilematcher(
277 283 matcher, repo[None].manifest(), ladded
278 284 )
279 285 bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
280 286
281 287 bad.extend(f for f in lbad)
282 288 return bad
283 289
284 290
285 291 @eh.wrapfunction(cmdutil, b'remove')
286 292 def cmdutilremove(
287 293 orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
288 294 ):
289 295 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
290 296 result = orig(
291 297 ui,
292 298 repo,
293 299 normalmatcher,
294 300 prefix,
295 301 uipathfn,
296 302 after,
297 303 force,
298 304 subrepos,
299 305 dryrun,
300 306 )
301 307 return (
302 308 removelargefiles(
303 309 ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
304 310 )
305 311 or result
306 312 )
307 313
308 314
309 315 @eh.wrapfunction(subrepo.hgsubrepo, b'status')
310 316 def overridestatusfn(orig, repo, rev2, **opts):
311 317 with lfstatus(repo._repo):
312 318 return orig(repo, rev2, **opts)
313 319
314 320
315 321 @eh.wrapcommand(b'status')
316 322 def overridestatus(orig, ui, repo, *pats, **opts):
317 323 with lfstatus(repo):
318 324 return orig(ui, repo, *pats, **opts)
319 325
320 326
321 327 @eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
322 328 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
323 329 with lfstatus(repo._repo):
324 330 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
325 331
326 332
327 333 @eh.wrapcommand(b'log')
328 334 def overridelog(orig, ui, repo, *pats, **opts):
329 335 def overridematchandpats(
330 336 orig,
331 337 ctx,
332 338 pats=(),
333 339 opts=None,
334 340 globbed=False,
335 341 default=b'relpath',
336 342 badfn=None,
337 343 ):
338 344 """Matcher that merges root directory with .hglf, suitable for log.
339 345 It is still possible to match .hglf directly.
340 346 For any listed files run log on the standin too.
341 347 matchfn tries both the given filename and with .hglf stripped.
342 348 """
343 349 if opts is None:
344 350 opts = {}
345 351 matchandpats = orig(ctx, pats, opts, globbed, default, badfn=badfn)
346 352 m, p = copy.copy(matchandpats)
347 353
348 354 if m.always():
349 355 # We want to match everything anyway, so there's no benefit trying
350 356 # to add standins.
351 357 return matchandpats
352 358
353 359 pats = set(p)
354 360
355 361 def fixpats(pat, tostandin=lfutil.standin):
356 362 if pat.startswith(b'set:'):
357 363 return pat
358 364
359 365 kindpat = matchmod._patsplit(pat, None)
360 366
361 367 if kindpat[0] is not None:
362 368 return kindpat[0] + b':' + tostandin(kindpat[1])
363 369 return tostandin(kindpat[1])
364 370
365 371 cwd = repo.getcwd()
366 372 if cwd:
367 373 hglf = lfutil.shortname
368 374 back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])
369 375
370 376 def tostandin(f):
371 377 # The file may already be a standin, so truncate the back
372 378 # prefix and test before mangling it. This avoids turning
373 379 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
374 380 if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
375 381 return f
376 382
377 383 # An absolute path is from outside the repo, so truncate the
378 384 # path to the root before building the standin. Otherwise cwd
379 385 # is somewhere in the repo, relative to root, and needs to be
380 386 # prepended before building the standin.
381 387 if os.path.isabs(cwd):
382 388 f = f[len(back) :]
383 389 else:
384 390 f = cwd + b'/' + f
385 391 return back + lfutil.standin(f)
386 392
387 393 else:
388 394
389 395 def tostandin(f):
390 396 if lfutil.isstandin(f):
391 397 return f
392 398 return lfutil.standin(f)
393 399
394 400 pats.update(fixpats(f, tostandin) for f in p)
395 401
396 402 for i in range(0, len(m._files)):
397 403 # Don't add '.hglf' to m.files, since that is already covered by '.'
398 404 if m._files[i] == b'.':
399 405 continue
400 406 standin = lfutil.standin(m._files[i])
401 407 # If the "standin" is a directory, append instead of replace to
402 408 # support naming a directory on the command line with only
403 409 # largefiles. The original directory is kept to support normal
404 410 # files.
405 411 if standin in ctx:
406 412 m._files[i] = standin
407 413 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
408 414 m._files.append(standin)
409 415
410 416 m._fileset = set(m._files)
411 417 m.always = lambda: False
412 418 origmatchfn = m.matchfn
413 419
414 420 def lfmatchfn(f):
415 421 lf = lfutil.splitstandin(f)
416 422 if lf is not None and origmatchfn(lf):
417 423 return True
418 424 r = origmatchfn(f)
419 425 return r
420 426
421 427 m.matchfn = lfmatchfn
422 428
423 429 ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
424 430 return m, pats
425 431
426 432 # For hg log --patch, the match object is used in two different senses:
427 433 # (1) to determine what revisions should be printed out, and
428 434 # (2) to determine what files to print out diffs for.
429 435 # The magic matchandpats override should be used for case (1) but not for
430 436 # case (2).
431 437 oldmatchandpats = scmutil.matchandpats
432 438
433 439 def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
434 440 wctx = repo[None]
435 441 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
436 442 return lambda ctx: match
437 443
438 444 wrappedmatchandpats = extensions.wrappedfunction(
439 445 scmutil, b'matchandpats', overridematchandpats
440 446 )
441 447 wrappedmakefilematcher = extensions.wrappedfunction(
442 448 logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
443 449 )
444 450 with wrappedmatchandpats, wrappedmakefilematcher:
445 451 return orig(ui, repo, *pats, **opts)
446 452
447 453
448 454 @eh.wrapcommand(
449 455 b'verify',
450 456 opts=[
451 457 (
452 458 b'',
453 459 b'large',
454 460 None,
455 461 _(b'verify that all largefiles in the current revision exist'),
456 462 ),
457 463 (
458 464 b'',
459 465 b'lfa',
460 466 None,
461 467 _(b'verify largefiles in all revisions, not just current'),
462 468 ),
463 469 (
464 470 b'',
465 471 b'lfc',
466 472 None,
467 473 _(b'verify local largefile contents, not just existence'),
468 474 ),
469 475 ],
470 476 )
471 477 def overrideverify(orig, ui, repo, *pats, **opts):
472 478 large = opts.pop('large', False)
473 479 all = opts.pop('lfa', False)
474 480 contents = opts.pop('lfc', False)
475 481
476 482 result = orig(ui, repo, *pats, **opts)
477 483 if large or all or contents:
478 484 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
479 485 return result
480 486
481 487
482 488 @eh.wrapcommand(
483 489 b'debugstate',
484 490 opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
485 491 )
486 492 def overridedebugstate(orig, ui, repo, *pats, **opts):
487 493 large = opts.pop('large', False)
488 494 if large:
489 495
490 496 class fakerepo(object):
491 497 dirstate = lfutil.openlfdirstate(ui, repo)
492 498
493 499 orig(ui, fakerepo, *pats, **opts)
494 500 else:
495 501 orig(ui, repo, *pats, **opts)
496 502
497 503
498 504 # Before starting the manifest merge, merge.updates will call
499 505 # _checkunknownfile to check if there are any files in the merged-in
500 506 # changeset that collide with unknown files in the working copy.
501 507 #
502 508 # The largefiles are seen as unknown, so this prevents us from merging
503 509 # in a file 'foo' if we already have a largefile with the same name.
504 510 #
505 511 # The overridden function filters the unknown files by removing any
506 512 # largefiles. This makes the merge proceed and we can then handle this
507 513 # case further in the overridden calculateupdates function below.
508 514 @eh.wrapfunction(merge, b'_checkunknownfile')
509 515 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
510 516 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
511 517 return False
512 518 return origfn(repo, wctx, mctx, f, f2)
513 519
514 520
515 521 # The manifest merge handles conflicts on the manifest level. We want
516 522 # to handle changes in largefile-ness of files at this level too.
517 523 #
518 524 # The strategy is to run the original calculateupdates and then process
519 525 # the action list it outputs. There are two cases we need to deal with:
520 526 #
521 527 # 1. Normal file in p1, largefile in p2. Here the largefile is
522 528 # detected via its standin file, which will enter the working copy
523 529 # with a "get" action. It is not "merge" since the standin is all
524 530 # Mercurial is concerned with at this level -- the link to the
525 531 # existing normal file is not relevant here.
526 532 #
527 533 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
528 534 # since the largefile will be present in the working copy and
529 535 # different from the normal file in p2. Mercurial therefore
530 536 # triggers a merge action.
531 537 #
532 538 # In both cases, we prompt the user and emit new actions to either
533 539 # remove the standin (if the normal file was kept) or to remove the
534 540 # normal file and get the standin (if the largefile was kept). The
535 541 # default prompt answer is to use the largefile version since it was
536 542 # presumably changed on purpose.
537 543 #
538 544 # Finally, the merge.applyupdates function will then take care of
539 545 # writing the files into the working copy and lfcommands.updatelfiles
540 546 # will update the largefiles.
541 547 @eh.wrapfunction(merge, b'calculateupdates')
542 548 def overridecalculateupdates(
543 549 origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
544 550 ):
545 551 overwrite = force and not branchmerge
546 552 mresult = origfn(
547 553 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
548 554 )
549 555
550 556 if overwrite:
551 557 return mresult
552 558
553 559 # Convert to dictionary with filename as key and action as value.
554 560 lfiles = set()
555 561 for f in mresult.files():
556 562 splitstandin = lfutil.splitstandin(f)
557 563 if splitstandin is not None and splitstandin in p1:
558 564 lfiles.add(splitstandin)
559 565 elif lfutil.standin(f) in p1:
560 566 lfiles.add(f)
561 567
562 568 for lfile in sorted(lfiles):
563 569 standin = lfutil.standin(lfile)
564 570 (lm, largs, lmsg) = mresult.getfile(lfile, (None, None, None))
565 571 (sm, sargs, smsg) = mresult.getfile(standin, (None, None, None))
566 if sm in (b'g', b'dc') and lm != b'r':
567 if sm == b'dc':
572
573 if sm in (ACTION_GET, ACTION_DELETED_CHANGED) and lm != ACTION_REMOVE:
574 if sm == ACTION_DELETED_CHANGED:
568 575 f1, f2, fa, move, anc = sargs
569 576 sargs = (p2[f2].flags(), False)
570 577 # Case 1: normal file in the working copy, largefile in
571 578 # the second parent
572 579 usermsg = (
573 580 _(
574 581 b'remote turned local normal file %s into a largefile\n'
575 582 b'use (l)argefile or keep (n)ormal file?'
576 583 b'$$ &Largefile $$ &Normal file'
577 584 )
578 585 % lfile
579 586 )
580 587 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
581 mresult.addfile(lfile, b'r', None, b'replaced by standin')
582 mresult.addfile(standin, b'g', sargs, b'replaces standin')
588 mresult.addfile(
589 lfile, ACTION_REMOVE, None, b'replaced by standin'
590 )
591 mresult.addfile(standin, ACTION_GET, sargs, b'replaces standin')
583 592 else: # keep local normal file
584 mresult.addfile(lfile, b'k', None, b'replaces standin')
593 mresult.addfile(lfile, ACTION_KEEP, None, b'replaces standin')
585 594 if branchmerge:
586 595 mresult.addfile(
587 596 standin,
588 b'k',
597 ACTION_KEEP,
589 598 None,
590 599 b'replaced by non-standin',
591 600 )
592 601 else:
593 602 mresult.addfile(
594 603 standin,
595 b'r',
604 ACTION_REMOVE,
596 605 None,
597 606 b'replaced by non-standin',
598 607 )
599 elif lm in (b'g', b'dc') and sm != b'r':
600 if lm == b'dc':
608 if lm in (ACTION_GET, ACTION_DELETED_CHANGED) and sm != ACTION_REMOVE:
609 if lm == ACTION_DELETED_CHANGED:
601 610 f1, f2, fa, move, anc = largs
602 611 largs = (p2[f2].flags(), False)
603 612 # Case 2: largefile in the working copy, normal file in
604 613 # the second parent
605 614 usermsg = (
606 615 _(
607 616 b'remote turned local largefile %s into a normal file\n'
608 617 b'keep (l)argefile or use (n)ormal file?'
609 618 b'$$ &Largefile $$ &Normal file'
610 619 )
611 620 % lfile
612 621 )
613 622 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
614 623 if branchmerge:
615 624 # largefile can be restored from standin safely
616 625 mresult.addfile(
617 626 lfile,
618 b'k',
627 ACTION_KEEP,
619 628 None,
620 629 b'replaced by standin',
621 630 )
622 mresult.addfile(standin, b'k', None, b'replaces standin')
631 mresult.addfile(
632 standin, ACTION_KEEP, None, b'replaces standin'
633 )
623 634 else:
624 635 # "lfile" should be marked as "removed" without
625 636 # removal of itself
626 637 mresult.addfile(
627 638 lfile,
628 639 MERGE_ACTION_LARGEFILE_MARK_REMOVED,
629 640 None,
630 641 b'forget non-standin largefile',
631 642 )
632 643
633 644 # linear-merge should treat this largefile as 're-added'
634 mresult.addfile(standin, b'a', None, b'keep standin')
645 mresult.addfile(standin, ACTION_ADD, None, b'keep standin')
635 646 else: # pick remote normal file
636 mresult.addfile(lfile, b'g', largs, b'replaces standin')
647 mresult.addfile(lfile, ACTION_GET, largs, b'replaces standin')
637 648 mresult.addfile(
638 649 standin,
639 b'r',
650 ACTION_REMOVE,
640 651 None,
641 652 b'replaced by non-standin',
642 653 )
643 654
644 655 return mresult
645 656
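# Illustrative summary (editorial sketch) of the action rewriting above,
# per prompt outcome:
#
#   case 1 (standin GET/DELETED_CHANGED, largefile not REMOVE):
#       pick largefile -> lfile: REMOVE,  standin: GET
#       keep normal    -> lfile: KEEP,    standin: KEEP (branch merge)
#                                         or REMOVE (linear update)
#   case 2 (largefile GET/DELETED_CHANGED, standin not REMOVE):
#       keep largefile -> lfile: KEEP,    standin: KEEP (branch merge)
#                         or lfile: lfmr, standin: ADD  (linear update)
#       pick normal    -> lfile: GET,     standin: REMOVE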
646 657
647 658 @eh.wrapfunction(mergestatemod, b'recordupdates')
648 659 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
649 660 if MERGE_ACTION_LARGEFILE_MARK_REMOVED in actions:
650 661 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
651 662 with lfdirstate.parentchange():
652 663 for lfile, args, msg in actions[
653 664 MERGE_ACTION_LARGEFILE_MARK_REMOVED
654 665 ]:
655 666 # this should be executed before 'orig', to execute 'remove'
656 667 # before all other actions
657 668 repo.dirstate.update_file(
658 669 lfile, p1_tracked=True, wc_tracked=False
659 670 )
660 671 # make sure lfile doesn't get synclfdirstate'd as normal
661 672 lfdirstate.update_file(lfile, p1_tracked=False, wc_tracked=True)
662 673 lfdirstate.write(repo.currenttransaction())
663 674
664 675 return orig(repo, actions, branchmerge, getfiledata)
665 676
666 677
667 678 # Override filemerge to prompt the user about how they wish to merge
668 679 # largefiles. This will handle identical edits without prompting the user.
669 @eh.wrapfunction(filemerge, b'_filemerge')
680 @eh.wrapfunction(filemerge, b'filemerge')
670 681 def overridefilemerge(
671 origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
682 origfn, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
672 683 ):
673 684 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
674 return origfn(
675 premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
676 )
685 return origfn(repo, wctx, mynode, orig, fcd, fco, fca, labels=labels)
677 686
678 687 ahash = lfutil.readasstandin(fca).lower()
679 688 dhash = lfutil.readasstandin(fcd).lower()
680 689 ohash = lfutil.readasstandin(fco).lower()
681 690 if (
682 691 ohash != ahash
683 692 and ohash != dhash
684 693 and (
685 694 dhash == ahash
686 695 or repo.ui.promptchoice(
687 696 _(
688 697 b'largefile %s has a merge conflict\nancestor was %s\n'
689 698 b'you can keep (l)ocal %s or take (o)ther %s.\n'
690 699 b'what do you want to do?'
691 700 b'$$ &Local $$ &Other'
692 701 )
693 702 % (lfutil.splitstandin(orig), ahash, dhash, ohash),
694 703 0,
695 704 )
696 705 == 1
697 706 )
698 707 ):
699 708 repo.wwrite(fcd.path(), fco.data(), fco.flags())
700 return True, 0, False
709 return 0, False
701 710
702 711
703 712 @eh.wrapfunction(copiesmod, b'pathcopies')
704 713 def copiespathcopies(orig, ctx1, ctx2, match=None):
705 714 copies = orig(ctx1, ctx2, match=match)
706 715 updated = {}
707 716
708 717 for k, v in pycompat.iteritems(copies):
709 718 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
710 719
711 720 return updated
712 721
713 722
714 723 # Copy first changes the matchers to match standins instead of
715 724 # largefiles. Then it overrides util.copyfile; in that override it
716 725 # checks if the destination largefile already exists. It also keeps a
717 726 # list of copied files so that the largefiles can be copied and the
718 727 # dirstate updated.
719 728 @eh.wrapfunction(cmdutil, b'copy')
720 729 def overridecopy(orig, ui, repo, pats, opts, rename=False):
721 730 # doesn't remove largefile on rename
722 731 if len(pats) < 2:
723 732 # this isn't legal, let the original function deal with it
724 733 return orig(ui, repo, pats, opts, rename)
725 734
726 735 # This could copy both lfiles and normal files in one command,
727 736 # but we don't want to do that. First replace their matcher to
728 737 # only match normal files and run it, then replace it to just
729 738 # match largefiles and run it again.
730 739 nonormalfiles = False
731 740 nolfiles = False
732 741 manifest = repo[None].manifest()
733 742
734 743 def normalfilesmatchfn(
735 744 orig,
736 745 ctx,
737 746 pats=(),
738 747 opts=None,
739 748 globbed=False,
740 749 default=b'relpath',
741 750 badfn=None,
742 751 ):
743 752 if opts is None:
744 753 opts = {}
745 754 match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
746 755 return composenormalfilematcher(match, manifest)
747 756
748 757 with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn):
749 758 try:
750 759 result = orig(ui, repo, pats, opts, rename)
751 760 except error.Abort as e:
752 761 if e.message != _(b'no files to copy'):
753 762 raise e
754 763 else:
755 764 nonormalfiles = True
756 765 result = 0
757 766
758 767 # The first rename can cause our current working directory to be removed.
759 768 # In that case there is nothing left to copy/rename so just quit.
760 769 try:
761 770 repo.getcwd()
762 771 except OSError:
763 772 return result
764 773
765 774 def makestandin(relpath):
766 775 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
767 776 return repo.wvfs.join(lfutil.standin(path))
768 777
769 778 fullpats = scmutil.expandpats(pats)
770 779 dest = fullpats[-1]
771 780
772 781 if os.path.isdir(dest):
773 782 if not os.path.isdir(makestandin(dest)):
774 783 os.makedirs(makestandin(dest))
775 784
776 785 try:
777 786 # When we call orig below it creates the standins but we don't add
778 787 # them to the dir state until later so lock during that time.
779 788 wlock = repo.wlock()
780 789
781 790 manifest = repo[None].manifest()
782 791
783 792 def overridematch(
784 793 orig,
785 794 ctx,
786 795 pats=(),
787 796 opts=None,
788 797 globbed=False,
789 798 default=b'relpath',
790 799 badfn=None,
791 800 ):
792 801 if opts is None:
793 802 opts = {}
794 803 newpats = []
795 804 # The patterns were previously mangled to add the standin
796 805 # directory; we need to remove that now
797 806 for pat in pats:
798 807 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
799 808 newpats.append(pat.replace(lfutil.shortname, b''))
800 809 else:
801 810 newpats.append(pat)
802 811 match = orig(ctx, newpats, opts, globbed, default, badfn=badfn)
803 812 m = copy.copy(match)
804 813 lfile = lambda f: lfutil.standin(f) in manifest
805 814 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
806 815 m._fileset = set(m._files)
807 816 origmatchfn = m.matchfn
808 817
809 818 def matchfn(f):
810 819 lfile = lfutil.splitstandin(f)
811 820 return (
812 821 lfile is not None
813 822 and (f in manifest)
814 823 and origmatchfn(lfile)
815 824 or None
816 825 )
817 826
818 827 m.matchfn = matchfn
819 828 return m
820 829
821 830 listpats = []
822 831 for pat in pats:
823 832 if matchmod.patkind(pat) is not None:
824 833 listpats.append(pat)
825 834 else:
826 835 listpats.append(makestandin(pat))
827 836
828 837 copiedfiles = []
829 838
830 839 def overridecopyfile(orig, src, dest, *args, **kwargs):
831 840 if lfutil.shortname in src and dest.startswith(
832 841 repo.wjoin(lfutil.shortname)
833 842 ):
834 843 destlfile = dest.replace(lfutil.shortname, b'')
835 844 if not opts[b'force'] and os.path.exists(destlfile):
836 845 raise IOError(
837 846 b'', _(b'destination largefile already exists')
838 847 )
839 848 copiedfiles.append((src, dest))
840 849 orig(src, dest, *args, **kwargs)
841 850
842 851 with extensions.wrappedfunction(util, b'copyfile', overridecopyfile):
843 852 with extensions.wrappedfunction(scmutil, b'match', overridematch):
844 853 result += orig(ui, repo, listpats, opts, rename)
845 854
846 855 lfdirstate = lfutil.openlfdirstate(ui, repo)
847 856 for (src, dest) in copiedfiles:
848 857 if lfutil.shortname in src and dest.startswith(
849 858 repo.wjoin(lfutil.shortname)
850 859 ):
851 860 srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
852 861 destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
853 862 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
854 863 if not os.path.isdir(destlfiledir):
855 864 os.makedirs(destlfiledir)
856 865 if rename:
857 866 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
858 867
859 868 # The file is gone, but this deletes any empty parent
860 869 # directories as a side-effect.
861 870 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
862 871 lfdirstate.set_untracked(srclfile)
863 872 else:
864 873 util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))
865 874
866 875 lfdirstate.set_tracked(destlfile)
867 876 lfdirstate.write(repo.currenttransaction())
868 877 except error.Abort as e:
869 878 if e.message != _(b'no files to copy'):
870 879 raise e
871 880 else:
872 881 nolfiles = True
873 882 finally:
874 883 wlock.release()
875 884
876 885 if nolfiles and nonormalfiles:
877 886 raise error.Abort(_(b'no files to copy'))
878 887
879 888 return result
880 889
881 890
882 891 # When the user calls revert, we have to be careful to not revert any
883 892 # changes to other largefiles accidentally. This means we have to keep
884 893 # track of the largefiles that are being reverted so we only pull down
885 894 # the necessary largefiles.
886 895 #
887 896 # Standins are only updated (to match the hash of largefiles) before
888 897 # commits. Update the standins then run the original revert, changing
889 898 # the matcher to hit standins instead of largefiles. Based on the
890 899 # resulting standins update the largefiles.
891 900 @eh.wrapfunction(cmdutil, b'revert')
892 901 def overriderevert(orig, ui, repo, ctx, *pats, **opts):
893 902 # Because we put the standins in a bad state (by updating them)
894 903 # and then return them to a correct state we need to lock to
895 904 # prevent others from changing them in their incorrect state.
896 905 with repo.wlock():
897 906 lfdirstate = lfutil.openlfdirstate(ui, repo)
898 907 s = lfutil.lfdirstatestatus(lfdirstate, repo)
899 908 lfdirstate.write(repo.currenttransaction())
900 909 for lfile in s.modified:
901 910 lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
902 911 for lfile in s.deleted:
903 912 fstandin = lfutil.standin(lfile)
904 913 if repo.wvfs.exists(fstandin):
905 914 repo.wvfs.unlink(fstandin)
906 915
907 916 oldstandins = lfutil.getstandinsstate(repo)
908 917
909 918 def overridematch(
910 919 orig,
911 920 mctx,
912 921 pats=(),
913 922 opts=None,
914 923 globbed=False,
915 924 default=b'relpath',
916 925 badfn=None,
917 926 ):
918 927 if opts is None:
919 928 opts = {}
920 929 match = orig(mctx, pats, opts, globbed, default, badfn=badfn)
921 930 m = copy.copy(match)
922 931
923 932 # revert supports recursing into subrepos, and though largefiles
924 933 # currently doesn't work correctly in that case, this match is
925 934 # called, so the lfdirstate above may not be the correct one for
926 935 # this invocation of match.
927 936 lfdirstate = lfutil.openlfdirstate(
928 937 mctx.repo().ui, mctx.repo(), False
929 938 )
930 939
931 940 wctx = repo[None]
932 941 matchfiles = []
933 942 for f in m._files:
934 943 standin = lfutil.standin(f)
935 944 if standin in ctx or standin in mctx:
936 945 matchfiles.append(standin)
937 946 elif standin in wctx or lfdirstate.get_entry(f).removed:
938 947 continue
939 948 else:
940 949 matchfiles.append(f)
941 950 m._files = matchfiles
942 951 m._fileset = set(m._files)
943 952 origmatchfn = m.matchfn
944 953
945 954 def matchfn(f):
946 955 lfile = lfutil.splitstandin(f)
947 956 if lfile is not None:
948 957 return origmatchfn(lfile) and (f in ctx or f in mctx)
949 958 return origmatchfn(f)
950 959
951 960 m.matchfn = matchfn
952 961 return m
953 962
954 963 with extensions.wrappedfunction(scmutil, b'match', overridematch):
955 964 orig(ui, repo, ctx, *pats, **opts)
956 965
957 966 newstandins = lfutil.getstandinsstate(repo)
958 967 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
959 968 # lfdirstate should be 'normallookup'-ed for updated files,
960 969 # because reverting doesn't touch dirstate for 'normal' files
961 970 # when target revision is explicitly specified: in such case,
962 971 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
963 972 # of target (standin) file.
964 973 lfcommands.updatelfiles(
965 974 ui, repo, filelist, printmessage=False, normallookup=True
966 975 )
967 976
968 977
969 978 # after pulling changesets, we need to take some extra care to get
970 979 # largefiles updated remotely
971 980 @eh.wrapcommand(
972 981 b'pull',
973 982 opts=[
974 983 (
975 984 b'',
976 985 b'all-largefiles',
977 986 None,
978 987 _(b'download all pulled versions of largefiles (DEPRECATED)'),
979 988 ),
980 989 (
981 990 b'',
982 991 b'lfrev',
983 992 [],
984 993 _(b'download largefiles for these revisions'),
985 994 _(b'REV'),
986 995 ),
987 996 ],
988 997 )
989 998 def overridepull(orig, ui, repo, source=None, **opts):
990 999 revsprepull = len(repo)
991 1000 if not source:
992 1001 source = b'default'
993 1002 repo.lfpullsource = source
994 1003 result = orig(ui, repo, source, **opts)
995 1004 revspostpull = len(repo)
996 1005 lfrevs = opts.get('lfrev', [])
997 1006 if opts.get('all_largefiles'):
998 1007 lfrevs.append(b'pulled()')
999 1008 if lfrevs and revspostpull > revsprepull:
1000 1009 numcached = 0
1001 1010 repo.firstpulled = revsprepull # for pulled() revset expression
1002 1011 try:
1003 1012 for rev in logcmdutil.revrange(repo, lfrevs):
1004 1013 ui.note(_(b'pulling largefiles for revision %d\n') % rev)
1005 1014 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
1006 1015 numcached += len(cached)
1007 1016 finally:
1008 1017 del repo.firstpulled
1009 1018 ui.status(_(b"%d largefiles cached\n") % numcached)
1010 1019 return result
1011 1020
1012 1021
1013 1022 @eh.wrapcommand(
1014 1023 b'push',
1015 1024 opts=[
1016 1025 (
1017 1026 b'',
1018 1027 b'lfrev',
1019 1028 [],
1020 1029 _(b'upload largefiles for these revisions'),
1021 1030 _(b'REV'),
1022 1031 )
1023 1032 ],
1024 1033 )
1025 1034 def overridepush(orig, ui, repo, *args, **kwargs):
1026 1035 """Override push command and store --lfrev parameters in opargs"""
1027 1036 lfrevs = kwargs.pop('lfrev', None)
1028 1037 if lfrevs:
1029 1038 opargs = kwargs.setdefault('opargs', {})
1030 1039 opargs[b'lfrevs'] = logcmdutil.revrange(repo, lfrevs)
1031 1040 return orig(ui, repo, *args, **kwargs)
1032 1041
1033 1042
1034 1043 @eh.wrapfunction(exchange, b'pushoperation')
1035 1044 def exchangepushoperation(orig, *args, **kwargs):
1036 1045 """Override pushoperation constructor and store lfrevs parameter"""
1037 1046 lfrevs = kwargs.pop('lfrevs', None)
1038 1047 pushop = orig(*args, **kwargs)
1039 1048 pushop.lfrevs = lfrevs
1040 1049 return pushop
1041 1050
1042 1051
1043 1052 @eh.revsetpredicate(b'pulled()')
1044 1053 def pulledrevsetsymbol(repo, subset, x):
1045 1054 """Changesets that just has been pulled.
1046 1055
1047 1056 Only available with largefiles, while evaluating pull --lfrev expressions.
1048 1057
1049 1058 .. container:: verbose
1050 1059
1051 1060 Some examples:
1052 1061
1053 1062 - pull largefiles for all new changesets::
1054 1063
1055 1064 hg pull --lfrev "pulled()"
1056 1065
1057 1066 - pull largefiles for all new branch heads::
1058 1067
1059 1068 hg pull --lfrev "head(pulled()) and not closed()"
1060 1069
1061 1070 """
1062 1071
1063 1072 try:
1064 1073 firstpulled = repo.firstpulled
1065 1074 except AttributeError:
1066 1075 raise error.Abort(_(b"pulled() only available in --lfrev"))
1067 1076 return smartset.baseset([r for r in subset if r >= firstpulled])
1068 1077
1069 1078
1070 1079 @eh.wrapcommand(
1071 1080 b'clone',
1072 1081 opts=[
1073 1082 (
1074 1083 b'',
1075 1084 b'all-largefiles',
1076 1085 None,
1077 1086 _(b'download all versions of all largefiles'),
1078 1087 )
1079 1088 ],
1080 1089 )
1081 1090 def overrideclone(orig, ui, source, dest=None, **opts):
1082 1091 d = dest
1083 1092 if d is None:
1084 1093 d = hg.defaultdest(source)
1085 1094 if opts.get('all_largefiles') and not hg.islocal(d):
1086 1095 raise error.Abort(
1087 1096 _(b'--all-largefiles is incompatible with non-local destination %s')
1088 1097 % d
1089 1098 )
1090 1099
1091 1100 return orig(ui, source, dest, **opts)
1092 1101
1093 1102
1094 1103 @eh.wrapfunction(hg, b'clone')
1095 1104 def hgclone(orig, ui, opts, *args, **kwargs):
1096 1105 result = orig(ui, opts, *args, **kwargs)
1097 1106
1098 1107 if result is not None:
1099 1108 sourcerepo, destrepo = result
1100 1109 repo = destrepo.local()
1101 1110
1102 1111 # When cloning to a remote repo (like through SSH), no repo is available
1103 1112 # from the peer. Therefore the largefiles can't be downloaded and the
1104 1113 # hgrc can't be updated.
1105 1114 if not repo:
1106 1115 return result
1107 1116
1108 1117 # Caching is implicitly limited to 'rev' option, since the dest repo was
1109 1118 # truncated at that point. The user may expect a download count with
1110 1119 # this option, so attempt it whether or not this is a largefile repo.
1111 1120 if opts.get(b'all_largefiles'):
1112 1121 success, missing = lfcommands.downloadlfiles(ui, repo)
1113 1122
1114 1123 if missing != 0:
1115 1124 return None
1116 1125
1117 1126 return result
1118 1127
1119 1128
1120 1129 @eh.wrapcommand(b'rebase', extension=b'rebase')
1121 1130 def overriderebasecmd(orig, ui, repo, **opts):
1122 1131 if not util.safehasattr(repo, b'_largefilesenabled'):
1123 1132 return orig(ui, repo, **opts)
1124 1133
1125 1134 resuming = opts.get('continue')
1126 1135 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1127 1136 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1128 1137 try:
1129 1138 with ui.configoverride(
1130 1139 {(b'rebase', b'experimental.inmemory'): False}, b"largefiles"
1131 1140 ):
1132 1141 return orig(ui, repo, **opts)
1133 1142 finally:
1134 1143 repo._lfstatuswriters.pop()
1135 1144 repo._lfcommithooks.pop()
1136 1145
1137 1146
1138 1147 @eh.extsetup
1139 1148 def overriderebase(ui):
1140 1149 try:
1141 1150 rebase = extensions.find(b'rebase')
1142 1151 except KeyError:
1143 1152 pass
1144 1153 else:
1145 1154
1146 1155 def _dorebase(orig, *args, **kwargs):
1147 1156 kwargs['inmemory'] = False
1148 1157 return orig(*args, **kwargs)
1149 1158
1150 1159 extensions.wrapfunction(rebase, b'_dorebase', _dorebase)
1151 1160
1152 1161
1153 1162 @eh.wrapcommand(b'archive')
1154 1163 def overridearchivecmd(orig, ui, repo, dest, **opts):
1155 1164 with lfstatus(repo.unfiltered()):
1156 1165 return orig(ui, repo.unfiltered(), dest, **opts)
1157 1166
1158 1167
1159 1168 @eh.wrapfunction(webcommands, b'archive')
1160 1169 def hgwebarchive(orig, web):
1161 1170 with lfstatus(web.repo):
1162 1171 return orig(web)
1163 1172
1164 1173
1165 1174 @eh.wrapfunction(archival, b'archive')
1166 1175 def overridearchive(
1167 1176 orig,
1168 1177 repo,
1169 1178 dest,
1170 1179 node,
1171 1180 kind,
1172 1181 decode=True,
1173 1182 match=None,
1174 1183 prefix=b'',
1175 1184 mtime=None,
1176 1185 subrepos=None,
1177 1186 ):
1178 1187 # For some reason setting repo.lfstatus in hgwebarchive only changes the
1179 1188 # unfiltered repo's attr, so check that as well.
1180 1189 if not repo.lfstatus and not repo.unfiltered().lfstatus:
1181 1190 return orig(
1182 1191 repo, dest, node, kind, decode, match, prefix, mtime, subrepos
1183 1192 )
1184 1193
1185 1194 # No need to lock because we are only reading history and
1186 1195 # largefile caches, neither of which are modified.
1187 1196 if node is not None:
1188 1197 lfcommands.cachelfiles(repo.ui, repo, node)
1189 1198
1190 1199 if kind not in archival.archivers:
1191 1200 raise error.Abort(_(b"unknown archive type '%s'") % kind)
1192 1201
1193 1202 ctx = repo[node]
1194 1203
1195 1204 if kind == b'files':
1196 1205 if prefix:
1197 1206 raise error.Abort(_(b'cannot give prefix when archiving to files'))
1198 1207 else:
1199 1208 prefix = archival.tidyprefix(dest, kind, prefix)
1200 1209
1201 1210 def write(name, mode, islink, getdata):
1202 1211 if match and not match(name):
1203 1212 return
1204 1213 data = getdata()
1205 1214 if decode:
1206 1215 data = repo.wwritedata(name, data)
1207 1216 archiver.addfile(prefix + name, mode, islink, data)
1208 1217
1209 1218 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
1210 1219
1211 1220 if repo.ui.configbool(b"ui", b"archivemeta"):
1212 1221 write(
1213 1222 b'.hg_archival.txt',
1214 1223 0o644,
1215 1224 False,
1216 1225 lambda: archival.buildmetadata(ctx),
1217 1226 )
1218 1227
1219 1228 for f in ctx:
1220 1229 ff = ctx.flags(f)
1221 1230 getdata = ctx[f].data
1222 1231 lfile = lfutil.splitstandin(f)
1223 1232 if lfile is not None:
1224 1233 if node is not None:
1225 1234 path = lfutil.findfile(repo, getdata().strip())
1226 1235
1227 1236 if path is None:
1228 1237 raise error.Abort(
1229 1238 _(
1230 1239 b'largefile %s not found in repo store or system cache'
1231 1240 )
1232 1241 % lfile
1233 1242 )
1234 1243 else:
1235 1244 path = lfile
1236 1245
1237 1246 f = lfile
1238 1247
1239 1248 getdata = lambda: util.readfile(path)
1240 1249 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
1241 1250
1242 1251 if subrepos:
1243 1252 for subpath in sorted(ctx.substate):
1244 1253 sub = ctx.workingsub(subpath)
1245 1254 submatch = matchmod.subdirmatcher(subpath, match)
1246 1255 subprefix = prefix + subpath + b'/'
1247 1256
1248 1257 # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
1249 1258 # infer and possibly set lfstatus in hgsubrepoarchive. That would
1250 1259 # allow only hgsubrepos to set this, instead of the current scheme
1251 1260 # where the parent sets this for the child.
1252 1261 with (
1253 1262 util.safehasattr(sub, '_repo')
1254 1263 and lfstatus(sub._repo)
1255 1264 or util.nullcontextmanager()
1256 1265 ):
1257 1266 sub.archive(archiver, subprefix, submatch)
1258 1267
1259 1268 archiver.done()
1260 1269
1261 1270
1262 1271 @eh.wrapfunction(subrepo.hgsubrepo, b'archive')
1263 1272 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1264 1273 lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
1265 1274 if not lfenabled or not repo._repo.lfstatus:
1266 1275 return orig(repo, archiver, prefix, match, decode)
1267 1276
1268 1277 repo._get(repo._state + (b'hg',))
1269 1278 rev = repo._state[1]
1270 1279 ctx = repo._repo[rev]
1271 1280
1272 1281 if ctx.node() is not None:
1273 1282 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1274 1283
1275 1284 def write(name, mode, islink, getdata):
1276 1285 # At this point, the standin has been replaced with the largefile name,
1277 1286 # so the normal matcher works here without the lfutil variants.
1278 1287 if match and not match(name):
1279 1288 return
1280 1289 data = getdata()
1281 1290 if decode:
1282 1291 data = repo._repo.wwritedata(name, data)
1283 1292
1284 1293 archiver.addfile(prefix + name, mode, islink, data)
1285 1294
1286 1295 for f in ctx:
1287 1296 ff = ctx.flags(f)
1288 1297 getdata = ctx[f].data
1289 1298 lfile = lfutil.splitstandin(f)
1290 1299 if lfile is not None:
1291 1300 if ctx.node() is not None:
1292 1301 path = lfutil.findfile(repo._repo, getdata().strip())
1293 1302
1294 1303 if path is None:
1295 1304 raise error.Abort(
1296 1305 _(
1297 1306 b'largefile %s not found in repo store or system cache'
1298 1307 )
1299 1308 % lfile
1300 1309 )
1301 1310 else:
1302 1311 path = lfile
1303 1312
1304 1313 f = lfile
1305 1314
1306 1315 getdata = lambda: util.readfile(os.path.join(prefix, path))
1307 1316
1308 1317 write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
1309 1318
1310 1319 for subpath in sorted(ctx.substate):
1311 1320 sub = ctx.workingsub(subpath)
1312 1321 submatch = matchmod.subdirmatcher(subpath, match)
1313 1322 subprefix = prefix + subpath + b'/'
1314 1323 # TODO: Only hgsubrepo instances have `_repo`, so figure out how to
1315 1324 # infer and possibly set lfstatus at the top of this function. That
1316 1325 # would allow only hgsubrepos to set this, instead of the current scheme
1317 1326 # where the parent sets this for the child.
1318 1327 with (
1319 1328 util.safehasattr(sub, '_repo')
1320 1329 and lfstatus(sub._repo)
1321 1330 or util.nullcontextmanager()
1322 1331 ):
1323 1332 sub.archive(archiver, subprefix, submatch, decode)
1324 1333
1325 1334
1326 1335 # If a largefile is modified, the change is not reflected in its
1327 1336 # standin until a commit. cmdutil.bailifchanged() raises an exception
1328 1337 # if the repo has uncommitted changes. Wrap it to also check if
1329 1338 # largefiles were changed. This is used by bisect, backout and fetch.
1330 1339 @eh.wrapfunction(cmdutil, b'bailifchanged')
1331 1340 def overridebailifchanged(orig, repo, *args, **kwargs):
1332 1341 orig(repo, *args, **kwargs)
1333 1342 with lfstatus(repo):
1334 1343 s = repo.status()
1335 1344 if s.modified or s.added or s.removed or s.deleted:
1336 1345 raise error.Abort(_(b'uncommitted changes'))
1337 1346
1338 1347
1339 1348 @eh.wrapfunction(cmdutil, b'postcommitstatus')
1340 1349 def postcommitstatus(orig, repo, *args, **kwargs):
1341 1350 with lfstatus(repo):
1342 1351 return orig(repo, *args, **kwargs)
1343 1352
1344 1353
1345 1354 @eh.wrapfunction(cmdutil, b'forget')
1346 1355 def cmdutilforget(
1347 1356 orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
1348 1357 ):
1349 1358 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1350 1359 bad, forgot = orig(
1351 1360 ui,
1352 1361 repo,
1353 1362 normalmatcher,
1354 1363 prefix,
1355 1364 uipathfn,
1356 1365 explicitonly,
1357 1366 dryrun,
1358 1367 interactive,
1359 1368 )
1360 1369 m = composelargefilematcher(match, repo[None].manifest())
1361 1370
1362 1371 with lfstatus(repo):
1363 1372 s = repo.status(match=m, clean=True)
1364 1373 manifest = repo[None].manifest()
1365 1374 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1366 1375 forget = [f for f in forget if lfutil.standin(f) in manifest]
1367 1376
1368 1377 for f in forget:
1369 1378 fstandin = lfutil.standin(f)
1370 1379 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1371 1380 ui.warn(
1372 1381 _(b'not removing %s: file is already untracked\n') % uipathfn(f)
1373 1382 )
1374 1383 bad.append(f)
1375 1384
1376 1385 for f in forget:
1377 1386 if ui.verbose or not m.exact(f):
1378 1387 ui.status(_(b'removing %s\n') % uipathfn(f))
1379 1388
1380 1389 # Need to lock because standin files are deleted then removed from the
1381 1390 # repository and we could race in-between.
1382 1391 with repo.wlock():
1383 1392 lfdirstate = lfutil.openlfdirstate(ui, repo)
1384 1393 for f in forget:
1385 1394 lfdirstate.set_untracked(f)
1386 1395 lfdirstate.write(repo.currenttransaction())
1387 1396 standins = [lfutil.standin(f) for f in forget]
1388 1397 for f in standins:
1389 1398 repo.wvfs.unlinkpath(f, ignoremissing=True)
1390 1399 rejected = repo[None].forget(standins)
1391 1400
1392 1401 bad.extend(f for f in rejected if f in m.files())
1393 1402 forgot.extend(f for f in forget if f not in rejected)
1394 1403 return bad, forgot
1395 1404
1396 1405
1397 1406 def _getoutgoings(repo, other, missing, addfunc):
1398 1407 """get pairs of filename and largefile hash in outgoing revisions
1399 1408 in 'missing'.
1400 1409
1401 1410 largefiles already existing on the 'other' repository are ignored.
1402 1411 
1403 1412 'addfunc' is invoked with each unique pair of filename and
1404 1413 largefile hash value.
1405 1414 """
1406 1415 knowns = set()
1407 1416 lfhashes = set()
1408 1417
1409 1418 def dedup(fn, lfhash):
1410 1419 k = (fn, lfhash)
1411 1420 if k not in knowns:
1412 1421 knowns.add(k)
1413 1422 lfhashes.add(lfhash)
1414 1423
1415 1424 lfutil.getlfilestoupload(repo, missing, dedup)
1416 1425 if lfhashes:
1417 1426 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1418 1427 for fn, lfhash in knowns:
1419 1428 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1420 1429 addfunc(fn, lfhash)
1421 1430
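# Usage sketch for _getoutgoings() (the variable names are illustrative,
# not from a real caller): collect every (filename, hash) pair that still
# needs uploading to the peer 'other':
#
#   pending = []
#   _getoutgoings(repo, other, outgoing.missing,
#                 lambda fn, lfhash: pending.append((fn, lfhash)))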
1422 1431
1423 1432 def outgoinghook(ui, repo, other, opts, missing):
1424 1433 if opts.pop(b'large', None):
1425 1434 lfhashes = set()
1426 1435 if ui.debugflag:
1427 1436 toupload = {}
1428 1437
1429 1438 def addfunc(fn, lfhash):
1430 1439 if fn not in toupload:
1431 1440 toupload[fn] = []
1432 1441 toupload[fn].append(lfhash)
1433 1442 lfhashes.add(lfhash)
1434 1443
1435 1444 def showhashes(fn):
1436 1445 for lfhash in sorted(toupload[fn]):
1437 1446 ui.debug(b' %s\n' % lfhash)
1438 1447
1439 1448 else:
1440 1449 toupload = set()
1441 1450
1442 1451 def addfunc(fn, lfhash):
1443 1452 toupload.add(fn)
1444 1453 lfhashes.add(lfhash)
1445 1454
1446 1455 def showhashes(fn):
1447 1456 pass
1448 1457
1449 1458 _getoutgoings(repo, other, missing, addfunc)
1450 1459
1451 1460 if not toupload:
1452 1461 ui.status(_(b'largefiles: no files to upload\n'))
1453 1462 else:
1454 1463 ui.status(
1455 1464 _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
1456 1465 )
1457 1466 for file in sorted(toupload):
1458 1467 ui.status(lfutil.splitstandin(file) + b'\n')
1459 1468 showhashes(file)
1460 1469 ui.status(b'\n')
1461 1470
1462 1471
1463 1472 @eh.wrapcommand(
1464 1473 b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1465 1474 )
1466 1475 def _outgoingcmd(orig, *args, **kwargs):
1466 1475 # Nothing to do here other than add the extra help option; the hook above
1468 1477 # processes it.
1469 1478 return orig(*args, **kwargs)
1470 1479
1471 1480
1472 1481 def summaryremotehook(ui, repo, opts, changes):
1473 1482 largeopt = opts.get(b'large', False)
1474 1483 if changes is None:
1475 1484 if largeopt:
1476 1485 return (False, True) # only outgoing check is needed
1477 1486 else:
1478 1487 return (False, False)
1479 1488 elif largeopt:
1480 1489 url, branch, peer, outgoing = changes[1]
1481 1490 if peer is None:
1482 1491 # i18n: column positioning for "hg summary"
1483 1492 ui.status(_(b'largefiles: (no remote repo)\n'))
1484 1493 return
1485 1494
1486 1495 toupload = set()
1487 1496 lfhashes = set()
1488 1497
1489 1498 def addfunc(fn, lfhash):
1490 1499 toupload.add(fn)
1491 1500 lfhashes.add(lfhash)
1492 1501
1493 1502 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1494 1503
1495 1504 if not toupload:
1496 1505 # i18n: column positioning for "hg summary"
1497 1506 ui.status(_(b'largefiles: (no files to upload)\n'))
1498 1507 else:
1499 1508 # i18n: column positioning for "hg summary"
1500 1509 ui.status(
1501 1510 _(b'largefiles: %d entities for %d files to upload\n')
1502 1511 % (len(lfhashes), len(toupload))
1503 1512 )
1504 1513
1505 1514
1506 1515 @eh.wrapcommand(
1507 1516 b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
1508 1517 )
1509 1518 def overridesummary(orig, ui, repo, *pats, **opts):
1510 1519 with lfstatus(repo):
1511 1520 orig(ui, repo, *pats, **opts)
1512 1521
1513 1522
1514 1523 @eh.wrapfunction(scmutil, b'addremove')
1515 1524 def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
1516 1525 if opts is None:
1517 1526 opts = {}
1518 1527 if not lfutil.islfilesrepo(repo):
1519 1528 return orig(repo, matcher, prefix, uipathfn, opts)
1520 1529 # Get the list of missing largefiles so we can remove them
1521 1530 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1522 unsure, s = lfdirstate.status(
1531 unsure, s, mtime_boundary = lfdirstate.status(
1523 1532 matchmod.always(),
1524 1533 subrepos=[],
1525 1534 ignored=False,
1526 1535 clean=False,
1527 1536 unknown=False,
1528 1537 )
1529 1538
1530 1539 # Call into the normal remove code, but let the original addremove handle
1531 1540 # the removal of the standin. Monkey patching here makes sure the
1532 1541 # largefiles code does not remove the standin itself, preventing a very
1533 1542 # confused state later.
1534 1543 if s.deleted:
1535 1544 m = copy.copy(matcher)
1536 1545
1537 1546 # The m._files and m._map attributes are not changed to the deleted list
1538 1547 # because that affects the m.exact() test, which in turn governs whether
1539 1548 # or not the file name is printed, and how. Simply limit the original
1540 1549 # matches to those in the deleted status list.
1541 1550 matchfn = m.matchfn
1542 1551 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1543 1552
1544 1553 removelargefiles(
1545 1554 repo.ui,
1546 1555 repo,
1547 1556 True,
1548 1557 m,
1549 1558 uipathfn,
1550 1559 opts.get(b'dry_run'),
1551 1560 **pycompat.strkwargs(opts)
1552 1561 )
1553 1562 # Call into the normal add code, and any files that *should* be added as
1554 1563 # largefiles will be
1555 1564 added, bad = addlargefiles(
1556 1565 repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
1557 1566 )
1558 1567 # Now that we've handled largefiles, hand off to the original addremove
1559 1568 # function to take care of the rest. Make sure it doesn't do anything with
1560 1569 # largefiles by passing a matcher that will ignore them.
1561 1570 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1562 1571 return orig(repo, matcher, prefix, uipathfn, opts)
1563 1572
1564 1573
1565 1574 # Calling purge with --all will cause the largefiles to be deleted.
1566 1575 # Override repo.status to prevent this from happening.
1567 1576 @eh.wrapcommand(b'purge')
1568 1577 def overridepurge(orig, ui, repo, *dirs, **opts):
1569 1578 # XXX Monkey patching a repoview will not work. The assigned attribute will
1570 1579 # be set on the unfiltered repo, but we will only lookup attributes in the
1571 1580 # unfiltered repo if the lookup in the repoview object itself fails. As the
1572 1581 # monkey patched method exists on the repoview class the lookup will not
1573 1582 # fail. As a result, the original version will shadow the monkey patched
1574 1583 # one, defeating the monkey patch.
1575 1584 #
1576 1585 # As a work around we use an unfiltered repo here. We should do something
1577 1586 # cleaner instead.
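# A minimal illustration of the lookup order described above (hypothetical
# session, not real code in this module):
#
#   view = repo.filtered(b'visible')
#   view.status = overridestatus  # the attribute lands on the unfiltered repo
#   view.status()                 # still resolves to the class's own method,
#                                 # because repoview only falls back to the
#                                 # unfiltered repo when its own lookup fails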
1578 1587 repo = repo.unfiltered()
1579 1588 oldstatus = repo.status
1580 1589
1581 1590 def overridestatus(
1582 1591 node1=b'.',
1583 1592 node2=None,
1584 1593 match=None,
1585 1594 ignored=False,
1586 1595 clean=False,
1587 1596 unknown=False,
1588 1597 listsubrepos=False,
1589 1598 ):
1590 1599 r = oldstatus(
1591 1600 node1, node2, match, ignored, clean, unknown, listsubrepos
1592 1601 )
1593 1602 lfdirstate = lfutil.openlfdirstate(ui, repo)
1594 1603 unknown = [
1595 1604 f for f in r.unknown if not lfdirstate.get_entry(f).any_tracked
1596 1605 ]
1597 1606 ignored = [
1598 1607 f for f in r.ignored if not lfdirstate.get_entry(f).any_tracked
1599 1608 ]
1600 1609 return scmutil.status(
1601 1610 r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
1602 1611 )
1603 1612
1604 1613 repo.status = overridestatus
1605 1614 orig(ui, repo, *dirs, **opts)
1606 1615 repo.status = oldstatus
1607 1616
1608 1617
1609 1618 @eh.wrapcommand(b'rollback')
1610 1619 def overriderollback(orig, ui, repo, **opts):
1611 1620 with repo.wlock():
1612 1621 before = repo.dirstate.parents()
1613 1622 orphans = {
1614 1623 f
1615 1624 for f in repo.dirstate
1616 1625 if lfutil.isstandin(f) and not repo.dirstate.get_entry(f).removed
1617 1626 }
1618 1627 result = orig(ui, repo, **opts)
1619 1628 after = repo.dirstate.parents()
1620 1629 if before == after:
1621 1630 return result # no need to restore standins
1622 1631
1623 1632 pctx = repo[b'.']
1624 1633 for f in repo.dirstate:
1625 1634 if lfutil.isstandin(f):
1626 1635 orphans.discard(f)
1627 1636 if repo.dirstate.get_entry(f).removed:
1628 1637 repo.wvfs.unlinkpath(f, ignoremissing=True)
1629 1638 elif f in pctx:
1630 1639 fctx = pctx[f]
1631 1640 repo.wwrite(f, fctx.data(), fctx.flags())
1632 1641 else:
1633 1642 # the content of the standin is not important in the 'a',
1634 1643 # 'm', or 'n' (coming from the 2nd parent) cases
1635 1644 lfutil.writestandin(repo, f, b'', False)
1636 1645 for standin in orphans:
1637 1646 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1638 1647
1639 1648 return result
1640 1649
1641 1650
1642 1651 @eh.wrapcommand(b'transplant', extension=b'transplant')
1643 1652 def overridetransplant(orig, ui, repo, *revs, **opts):
1644 1653 resuming = opts.get('continue')
1645 1654 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1646 1655 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1647 1656 try:
1648 1657 result = orig(ui, repo, *revs, **opts)
1649 1658 finally:
1650 1659 repo._lfstatuswriters.pop()
1651 1660 repo._lfcommithooks.pop()
1652 1661 return result
1653 1662
1654 1663
1655 1664 @eh.wrapcommand(b'cat')
1656 1665 def overridecat(orig, ui, repo, file1, *pats, **opts):
1657 1666 opts = pycompat.byteskwargs(opts)
1658 1667 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'))
1659 1668 err = 1
1660 1669 notbad = set()
1661 1670 m = scmutil.match(ctx, (file1,) + pats, opts)
1662 1671 origmatchfn = m.matchfn
1663 1672
1664 1673 def lfmatchfn(f):
1665 1674 if origmatchfn(f):
1666 1675 return True
1667 1676 lf = lfutil.splitstandin(f)
1668 1677 if lf is None:
1669 1678 return False
1670 1679 notbad.add(lf)
1671 1680 return origmatchfn(lf)
1672 1681
1673 1682 m.matchfn = lfmatchfn
1674 1683 origbadfn = m.bad
1675 1684
1676 1685 def lfbadfn(f, msg):
1677 1686 if f not in notbad:
1678 1687 origbadfn(f, msg)
1679 1688
1680 1689 m.bad = lfbadfn
1681 1690
1682 1691 origvisitdirfn = m.visitdir
1683 1692
1684 1693 def lfvisitdirfn(dir):
1685 1694 if dir == lfutil.shortname:
1686 1695 return True
1687 1696 ret = origvisitdirfn(dir)
1688 1697 if ret:
1689 1698 return ret
1690 1699 lf = lfutil.splitstandin(dir)
1691 1700 if lf is None:
1692 1701 return False
1693 1702 return origvisitdirfn(lf)
1694 1703
1695 1704 m.visitdir = lfvisitdirfn
1696 1705
1697 1706 for f in ctx.walk(m):
1698 1707 with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp:
1699 1708 lf = lfutil.splitstandin(f)
1700 1709 if lf is None or origmatchfn(f):
1701 1710 # duplicating unreachable code from commands.cat
1702 1711 data = ctx[f].data()
1703 1712 if opts.get(b'decode'):
1704 1713 data = repo.wwritedata(f, data)
1705 1714 fp.write(data)
1706 1715 else:
1707 1716 hash = lfutil.readasstandin(ctx[f])
1708 1717 if not lfutil.inusercache(repo.ui, hash):
1709 1718 store = storefactory.openstore(repo)
1710 1719 success, missing = store.get([(lf, hash)])
1711 1720 if len(success) != 1:
1712 1721 raise error.Abort(
1713 1722 _(
1714 1723 b'largefile %s is not in cache and could not be '
1715 1724 b'downloaded'
1716 1725 )
1717 1726 % lf
1718 1727 )
1719 1728 path = lfutil.usercachepath(repo.ui, hash)
1720 1729 with open(path, b"rb") as fpin:
1721 1730 for chunk in util.filechunkiter(fpin):
1722 1731 fp.write(chunk)
1723 1732 err = 0
1724 1733 return err
1725 1734
1726 1735
1727 1736 @eh.wrapfunction(merge, b'_update')
1728 1737 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
1729 1738 matcher = kwargs.get('matcher', None)
1730 1739 # note if this is a partial update
1731 1740 partial = matcher and not matcher.always()
1732 1741 with repo.wlock():
1733 1742 # branch | | |
1734 1743 # merge | force | partial | action
1735 1744 # -------+-------+---------+--------------
1736 1745 # x | x | x | linear-merge
1737 1746 # o | x | x | branch-merge
1738 1747 # x | o | x | overwrite (as clean update)
1739 1748 # o | o | x | force-branch-merge (*1)
1740 1749 # x | x | o | (*)
1741 1750 # o | x | o | (*)
1742 1751 # x | o | o | overwrite (as revert)
1743 1752 # o | o | o | (*)
1744 1753 #
1745 1754 # (*) don't care
1746 1755 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1747 1756
1748 1757 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1749 unsure, s = lfdirstate.status(
1758 unsure, s, mtime_boundary = lfdirstate.status(
1750 1759 matchmod.always(),
1751 1760 subrepos=[],
1752 1761 ignored=False,
1753 1762 clean=True,
1754 1763 unknown=False,
1755 1764 )
1756 1765 oldclean = set(s.clean)
1757 1766 pctx = repo[b'.']
1758 1767 dctx = repo[node]
1759 1768 for lfile in unsure + s.modified:
1760 1769 lfileabs = repo.wvfs.join(lfile)
1761 1770 if not repo.wvfs.exists(lfileabs):
1762 1771 continue
1763 1772 lfhash = lfutil.hashfile(lfileabs)
1764 1773 standin = lfutil.standin(lfile)
1765 1774 lfutil.writestandin(
1766 1775 repo, standin, lfhash, lfutil.getexecutable(lfileabs)
1767 1776 )
1768 1777 if standin in pctx and lfhash == lfutil.readasstandin(
1769 1778 pctx[standin]
1770 1779 ):
1771 1780 oldclean.add(lfile)
1772 1781 for lfile in s.added:
1773 1782 fstandin = lfutil.standin(lfile)
1774 1783 if fstandin not in dctx:
1775 1784 # in this case, content of standin file is meaningless
1776 1785 # (in dctx, lfile is unknown, or normal file)
1777 1786 continue
1778 1787 lfutil.updatestandin(repo, lfile, fstandin)
1779 1788 # mark all clean largefiles as dirty, just in case the update gets
1780 1789 # interrupted before largefiles and lfdirstate are synchronized
1781 1790 for lfile in oldclean:
1782 1791 lfdirstate.set_possibly_dirty(lfile)
1783 1792 lfdirstate.write(repo.currenttransaction())
1784 1793
1785 1794 oldstandins = lfutil.getstandinsstate(repo)
1786 1795 wc = kwargs.get('wc')
1787 1796 if wc and wc.isinmemory():
1788 1797 # largefiles is not a good candidate for in-memory merge (large
1789 1798 # files, custom dirstate, matcher usage).
1790 1799 raise error.ProgrammingError(
1791 1800 b'largefiles is not compatible with in-memory merge'
1792 1801 )
1793 1802 with lfdirstate.parentchange():
1794 1803 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1795 1804
1796 1805 newstandins = lfutil.getstandinsstate(repo)
1797 1806 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1798 1807
1799 1808 # to avoid leaving all largefiles dirty and thus rehashing them, mark
1800 1809 # all the ones that didn't change as clean
1801 1810 for lfile in oldclean.difference(filelist):
1802 1811 lfdirstate.update_file(lfile, p1_tracked=True, wc_tracked=True)
1803 1812 lfdirstate.write(repo.currenttransaction())
1804 1813
1805 1814 if branchmerge or force or partial:
1806 1815 filelist.extend(s.deleted + s.removed)
1807 1816
1808 1817 lfcommands.updatelfiles(
1809 1818 repo.ui, repo, filelist=filelist, normallookup=partial
1810 1819 )
1811 1820
1812 1821 return result
1813 1822
1814 1823
1815 1824 @eh.wrapfunction(scmutil, b'marktouched')
1816 1825 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1817 1826 result = orig(repo, files, *args, **kwargs)
1818 1827
1819 1828 filelist = []
1820 1829 for f in files:
1821 1830 lf = lfutil.splitstandin(f)
1822 1831 if lf is not None:
1823 1832 filelist.append(lf)
1824 1833 if filelist:
1825 1834 lfcommands.updatelfiles(
1826 1835 repo.ui,
1827 1836 repo,
1828 1837 filelist=filelist,
1829 1838 printmessage=False,
1830 1839 normallookup=True,
1831 1840 )
1832 1841
1833 1842 return result
1834 1843
1835 1844
1836 1845 @eh.wrapfunction(upgrade_actions, b'preservedrequirements')
1837 1846 @eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
1838 1847 def upgraderequirements(orig, repo):
1839 1848 reqs = orig(repo)
1840 1849 if b'largefiles' in repo.requirements:
1841 1850 reqs.add(b'largefiles')
1842 1851 return reqs
1843 1852
1844 1853
1845 1854 _lfscheme = b'largefile://'
1846 1855
1847 1856
1848 1857 @eh.wrapfunction(urlmod, b'open')
1849 1858 def openlargefile(orig, ui, url_, data=None, **kwargs):
1850 1859 if url_.startswith(_lfscheme):
1851 1860 if data:
1852 1861 msg = b"cannot use data on a 'largefile://' url"
1853 1862 raise error.ProgrammingError(msg)
1854 1863 lfid = url_[len(_lfscheme) :]
1855 1864 return storefactory.getlfile(ui, lfid)
1856 1865 else:
1857 1866 return orig(ui, url_, data=data, **kwargs)
@@ -1,456 +1,468 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''setup for largefiles repositories: reposetup'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13
14 14 from mercurial.i18n import _
15 15
16 16 from mercurial import (
17 17 error,
18 18 extensions,
19 19 localrepo,
20 20 match as matchmod,
21 21 scmutil,
22 22 util,
23 23 )
24 24
25 from mercurial.dirstateutils import timestamp
26
25 27 from . import (
26 28 lfcommands,
27 29 lfutil,
28 30 )
29 31
30 32
31 33 def reposetup(ui, repo):
32 34 # wire repositories should be given new wireproto functions
33 35 # by "proto.wirereposetup()" via "hg.wirepeersetupfuncs"
34 36 if not repo.local():
35 37 return
36 38
37 39 class lfilesrepo(repo.__class__):
38 40 # marker used to check whether a "repo" object has largefiles enabled
39 41 _largefilesenabled = True
40 42
41 43 lfstatus = False
42 44
43 45 # When lfstatus is set, return a context that gives the names
44 46 # of largefiles instead of their corresponding standins and
45 47 # identifies the largefiles as always binary, regardless of
46 48 # their actual contents.
47 49 def __getitem__(self, changeid):
48 50 ctx = super(lfilesrepo, self).__getitem__(changeid)
49 51 if self.lfstatus:
50 52
51 53 def files(orig):
52 54 filenames = orig()
53 55 return [lfutil.splitstandin(f) or f for f in filenames]
54 56
55 57 extensions.wrapfunction(ctx, 'files', files)
56 58
57 59 def manifest(orig):
58 60 man1 = orig()
59 61
60 62 class lfilesmanifest(man1.__class__):
61 63 def __contains__(self, filename):
62 64 orig = super(lfilesmanifest, self).__contains__
63 65 return orig(filename) or orig(
64 66 lfutil.standin(filename)
65 67 )
66 68
67 69 man1.__class__ = lfilesmanifest
68 70 return man1
69 71
70 72 extensions.wrapfunction(ctx, 'manifest', manifest)
71 73
72 74 def filectx(orig, path, fileid=None, filelog=None):
73 75 try:
74 76 if filelog is not None:
75 77 result = orig(path, fileid, filelog)
76 78 else:
77 79 result = orig(path, fileid)
78 80 except error.LookupError:
79 81 # Adding a null character will cause Mercurial to
80 82 # identify this as a binary file.
81 83 if filelog is not None:
82 84 result = orig(lfutil.standin(path), fileid, filelog)
83 85 else:
84 86 result = orig(lfutil.standin(path), fileid)
85 87 olddata = result.data
86 88 result.data = lambda: olddata() + b'\0'
87 89 return result
88 90
89 91 extensions.wrapfunction(ctx, 'filectx', filectx)
90 92
91 93 return ctx
92 94
93 95 # Figure out the status of big files and insert them into the
94 96 # appropriate list in the result. Also removes standin files
95 97 # from the listing. Revert to the original status if
96 98 # self.lfstatus is False.
97 99 # XXX large file status is buggy when used on repo proxy.
98 100 # XXX this needs to be investigated.
99 101 @localrepo.unfilteredmethod
100 102 def status(
101 103 self,
102 104 node1=b'.',
103 105 node2=None,
104 106 match=None,
105 107 ignored=False,
106 108 clean=False,
107 109 unknown=False,
108 110 listsubrepos=False,
109 111 ):
110 112 listignored, listclean, listunknown = ignored, clean, unknown
111 113 orig = super(lfilesrepo, self).status
112 114 if not self.lfstatus:
113 115 return orig(
114 116 node1,
115 117 node2,
116 118 match,
117 119 listignored,
118 120 listclean,
119 121 listunknown,
120 122 listsubrepos,
121 123 )
122 124
123 125 # some calls in this function rely on the old version of status
124 126 self.lfstatus = False
125 127 ctx1 = self[node1]
126 128 ctx2 = self[node2]
127 129 working = ctx2.rev() is None
128 130 parentworking = working and ctx1 == self[b'.']
129 131
130 132 if match is None:
131 133 match = matchmod.always()
132 134
133 135 try:
134 136 # updating the dirstate is optional
135 137 # so we don't wait on the lock
136 138 wlock = self.wlock(False)
137 139 gotlock = True
138 140 except error.LockError:
139 141 wlock = util.nullcontextmanager()
140 142 gotlock = False
141 143 with wlock:
142 144
143 145 # First check if paths or patterns were specified on the
144 146 # command line. If there were, and they don't match any
145 147 # largefiles, we should just bail here and let super
146 148 # handle it -- thus gaining a big performance boost.
147 149 lfdirstate = lfutil.openlfdirstate(ui, self)
148 150 if not match.always():
149 151 for f in lfdirstate:
150 152 if match(f):
151 153 break
152 154 else:
153 155 return orig(
154 156 node1,
155 157 node2,
156 158 match,
157 159 listignored,
158 160 listclean,
159 161 listunknown,
160 162 listsubrepos,
161 163 )
162 164
163 165 # Create a copy of match that matches standins instead
164 166 # of largefiles.
165 167 def tostandins(files):
166 168 if not working:
167 169 return files
168 170 newfiles = []
169 171 dirstate = self.dirstate
170 172 for f in files:
171 173 sf = lfutil.standin(f)
172 174 if sf in dirstate:
173 175 newfiles.append(sf)
174 176 elif dirstate.hasdir(sf):
175 177 # Directory entries could be regular or
176 178 # standin, check both
177 179 newfiles.extend((f, sf))
178 180 else:
179 181 newfiles.append(f)
180 182 return newfiles
181 183
182 184 m = copy.copy(match)
183 185 m._files = tostandins(m._files)
184 186
185 187 result = orig(
186 188 node1, node2, m, ignored, clean, unknown, listsubrepos
187 189 )
188 190 if working:
189 191
190 192 def sfindirstate(f):
191 193 sf = lfutil.standin(f)
192 194 dirstate = self.dirstate
193 195 return sf in dirstate or dirstate.hasdir(sf)
194 196
195 197 match._files = [f for f in match._files if sfindirstate(f)]
196 198 # Don't waste time getting the ignored and unknown
197 199 # files from lfdirstate
198 unsure, s = lfdirstate.status(
200 unsure, s, mtime_boundary = lfdirstate.status(
199 201 match,
200 202 subrepos=[],
201 203 ignored=False,
202 204 clean=listclean,
203 205 unknown=False,
204 206 )
205 207 (modified, added, removed, deleted, clean) = (
206 208 s.modified,
207 209 s.added,
208 210 s.removed,
209 211 s.deleted,
210 212 s.clean,
211 213 )
212 214 if parentworking:
215 wctx = repo[None]
213 216 for lfile in unsure:
214 217 standin = lfutil.standin(lfile)
215 218 if standin not in ctx1:
216 219 # from second parent
217 220 modified.append(lfile)
218 221 elif lfutil.readasstandin(
219 222 ctx1[standin]
220 223 ) != lfutil.hashfile(self.wjoin(lfile)):
221 224 modified.append(lfile)
222 225 else:
223 226 if listclean:
224 227 clean.append(lfile)
225 lfdirstate.set_clean(lfile)
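# Record (mode, size, mtime) so that later status runs can skip
# rehashing this largefile, but only when the mtime is unambiguous:
# reliable_mtime_of() returns None when the mtime falls inside the
# race window given by mtime_boundary.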
228 s = wctx[lfile].lstat()
229 mode = s.st_mode
230 size = s.st_size
231 mtime = timestamp.reliable_mtime_of(
232 s, mtime_boundary
233 )
234 if mtime is not None:
235 cache_data = (mode, size, mtime)
236 lfdirstate.set_clean(lfile, cache_data)
226 237 else:
227 238 tocheck = unsure + modified + added + clean
228 239 modified, added, clean = [], [], []
229 240 checkexec = self.dirstate._checkexec
230 241
231 242 for lfile in tocheck:
232 243 standin = lfutil.standin(lfile)
233 244 if standin in ctx1:
234 245 abslfile = self.wjoin(lfile)
235 246 if (
236 247 lfutil.readasstandin(ctx1[standin])
237 248 != lfutil.hashfile(abslfile)
238 249 ) or (
239 250 checkexec
240 251 and (b'x' in ctx1.flags(standin))
241 252 != bool(lfutil.getexecutable(abslfile))
242 253 ):
243 254 modified.append(lfile)
244 255 elif listclean:
245 256 clean.append(lfile)
246 257 else:
247 258 added.append(lfile)
248 259
249 260 # at this point, 'removed' contains largefiles
250 261 # marked as 'R' in the working context; largefiles
251 262 # that are not managed in the target context either
252 263 # should then be excluded from 'removed'.
253 264 removed = [
254 265 lfile
255 266 for lfile in removed
256 267 if lfutil.standin(lfile) in ctx1
257 268 ]
258 269
259 270 # Standins no longer found in lfdirstate have been deleted
260 271 for standin in ctx1.walk(lfutil.getstandinmatcher(self)):
261 272 lfile = lfutil.splitstandin(standin)
262 273 if not match(lfile):
263 274 continue
264 275 if lfile not in lfdirstate:
265 276 deleted.append(lfile)
266 277 # Sync "largefile has been removed" back to the
267 278 # standin. Removing a file as a side effect of
268 279 # running status is gross, but the alternatives (if
269 280 # any) are worse.
270 281 self.wvfs.unlinkpath(standin, ignoremissing=True)
271 282
272 283 # Filter result lists
273 284 result = list(result)
274 285
275 286 # Largefiles are not really removed when they're
276 287 # still in the normal dirstate. Likewise, normal
277 288 # files are not really removed if they are still in
278 289 # lfdirstate. This happens in merges where files
279 290 # change type.
280 291 removed = [f for f in removed if f not in self.dirstate]
281 292 result[2] = [f for f in result[2] if f not in lfdirstate]
282 293
283 294 lfiles = set(lfdirstate)
284 295 # Unknown files
285 296 result[4] = set(result[4]).difference(lfiles)
286 297 # Ignored files
287 298 result[5] = set(result[5]).difference(lfiles)
288 299 # combine normal files and largefiles
289 300 normals = [
290 301 [fn for fn in filelist if not lfutil.isstandin(fn)]
291 302 for filelist in result
292 303 ]
293 304 lfstatus = (
294 305 modified,
295 306 added,
296 307 removed,
297 308 deleted,
298 309 [],
299 310 [],
300 311 clean,
301 312 )
302 313 result = [
303 314 sorted(list1 + list2)
304 315 for (list1, list2) in zip(normals, lfstatus)
305 316 ]
306 317 else: # not against working directory
307 318 result = [
308 319 [lfutil.splitstandin(f) or f for f in items]
309 320 for items in result
310 321 ]
311 322
312 323 if gotlock:
313 324 lfdirstate.write(self.currenttransaction())
314 325
315 326 self.lfstatus = True
316 327 return scmutil.status(*result)
317 328
318 329 def commitctx(self, ctx, *args, **kwargs):
319 330 node = super(lfilesrepo, self).commitctx(ctx, *args, **kwargs)
320 331
321 332 class lfilesctx(ctx.__class__):
322 333 def markcommitted(self, node):
323 334 orig = super(lfilesctx, self).markcommitted
324 335 return lfutil.markcommitted(orig, self, node)
325 336
326 337 ctx.__class__ = lfilesctx
327 338 return node
328 339
329 340 # Before commit, largefile standins have not had their
330 341 # contents updated to reflect the hash of their largefile.
331 342 # Do that here.
332 343 def commit(
333 344 self,
334 345 text=b"",
335 346 user=None,
336 347 date=None,
337 348 match=None,
338 349 force=False,
339 350 editor=False,
340 351 extra=None,
341 352 ):
342 353 if extra is None:
343 354 extra = {}
344 355 orig = super(lfilesrepo, self).commit
345 356
346 357 with self.wlock():
347 358 lfcommithook = self._lfcommithooks[-1]
348 359 match = lfcommithook(self, match)
349 360 result = orig(
350 361 text=text,
351 362 user=user,
352 363 date=date,
353 364 match=match,
354 365 force=force,
355 366 editor=editor,
356 367 extra=extra,
357 368 )
358 369 return result
359 370
360 371 # TODO: _subdirlfs should be moved into "lfutil.py", because
361 372 # it is referenced only from "lfutil.updatestandinsbymatch"
362 373 def _subdirlfs(self, files, lfiles):
363 374 """
364 375 Adjust matched file list
365 376 If we pass a directory to commit whose only committable files
366 377 are largefiles, the core commit code aborts before finding
367 378 the largefiles.
368 379 So we do the following:
369 380 For directories that only have largefiles as matches,
370 381 we explicitly add the largefiles to the match list and remove
371 382 the directory.
372 383 In other cases, we leave the match list unmodified.
373 384 """
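# Example (hypothetical names): committing a directory 'assets' whose only
# committable entries are largefiles turns the match list ['assets'] into
# ['assets/big.bin', ...]; directories with matching normal files are left
# for the regular walk.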
374 385 actualfiles = []
375 386 dirs = []
376 387 regulars = []
377 388
378 389 for f in files:
379 390 if lfutil.isstandin(f + b'/'):
380 391 raise error.Abort(
381 392 _(b'file "%s" is a largefile standin') % f,
382 393 hint=b'commit the largefile itself instead',
383 394 )
384 395 # Scan directories
385 396 if self.wvfs.isdir(f):
386 397 dirs.append(f)
387 398 else:
388 399 regulars.append(f)
389 400
390 401 for f in dirs:
391 402 matcheddir = False
392 403 d = self.dirstate.normalize(f) + b'/'
393 404 # Check for matched normal files
394 405 for mf in regulars:
395 406 if self.dirstate.normalize(mf).startswith(d):
396 407 actualfiles.append(f)
397 408 matcheddir = True
398 409 break
399 410 if not matcheddir:
400 411 # If no normal match, manually append
401 412 # any matching largefiles
402 413 for lf in lfiles:
403 414 if self.dirstate.normalize(lf).startswith(d):
404 415 actualfiles.append(lf)
405 416 if not matcheddir:
406 417 # There may still be normal files in the dir, so
407 418 # add a directory to the list, which
408 419 # forces status/dirstate to walk all files and
409 420 # call the match function on the matcher, even
410 421 # on case sensitive filesystems.
411 422 actualfiles.append(b'.')
412 423 matcheddir = True
413 424 # Nothing in the directory matched, so re-add it
414 425 # and let commit reject it
415 426 if not matcheddir:
416 427 actualfiles.append(f)
417 428
418 429 # Always add normal files
419 430 actualfiles += regulars
420 431 return actualfiles
421 432
422 433 repo.__class__ = lfilesrepo
423 434
424 435 # stack of hooks being executed before committing.
425 436 # only last element ("_lfcommithooks[-1]") is used for each committing.
426 437 repo._lfcommithooks = [lfutil.updatestandinsbymatch]
427 438
428 439 # Stack of status writer functions taking "*msg, **opts" arguments
429 440 # like "ui.status()". Only last element ("_lfstatuswriters[-1]")
430 441 # is used to write status out.
431 442 repo._lfstatuswriters = [ui.status]
432 443
433 444 def prepushoutgoinghook(pushop):
434 445 """Push largefiles for pushop before pushing revisions."""
435 446 lfrevs = pushop.lfrevs
436 447 if lfrevs is None:
437 448 lfrevs = pushop.outgoing.missing
438 449 if lfrevs:
439 450 toupload = set()
440 451 addfunc = lambda fn, lfhash: toupload.add(lfhash)
441 452 lfutil.getlfilestoupload(pushop.repo, lfrevs, addfunc)
442 453 lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload)
443 454
444 455 repo.prepushoutgoinghooks.add(b"largefiles", prepushoutgoinghook)
445 456
446 457 def checkrequireslfiles(ui, repo, **kwargs):
447 if b'largefiles' not in repo.requirements and any(
448 lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles()
449 ):
450 repo.requirements.add(b'largefiles')
451 scmutil.writereporequirements(repo)
458 with repo.lock():
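# The lock makes the requirements check and the write below atomic with
# respect to concurrent transactions; without it, two hook invocations
# could clobber each other's requirements write.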
459 if b'largefiles' not in repo.requirements and any(
460 lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles()
461 ):
462 repo.requirements.add(b'largefiles')
463 scmutil.writereporequirements(repo)
452 464
453 465 ui.setconfig(
454 466 b'hooks', b'changegroup.lfiles', checkrequireslfiles, b'largefiles'
455 467 )
456 468 ui.setconfig(b'hooks', b'commit.lfiles', checkrequireslfiles, b'largefiles')
@@ -1,444 +1,447 b''
1 1 # lfs - hash-preserving large file support using Git-LFS protocol
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """lfs - large file support (EXPERIMENTAL)
9 9
10 10 This extension allows large files to be tracked outside of the normal
11 11 repository storage and stored on a centralized server, similar to the
12 12 ``largefiles`` extension. The ``git-lfs`` protocol is used when
13 13 communicating with the server, so existing git infrastructure can be
14 14 harnessed. Even though the files are stored outside of the repository,
15 15 they are still integrity checked in the same manner as normal files.
16 16
17 17 The files stored outside of the repository are downloaded on demand,
18 18 which reduces the time to clone, and possibly the local disk usage.
19 19 This changes fundamental workflows in a DVCS, so careful thought
20 20 should be given before deploying it. :hg:`convert` can be used to
21 21 convert LFS repositories to normal repositories that no longer
22 22 require this extension, and do so without changing the commit hashes.
23 23 This allows the extension to be disabled if the centralized workflow
24 24 becomes burdensome. However, the pre- and post-convert clones will
25 25 not be able to communicate with each other unless the extension is
26 26 enabled on both.
27 27
28 28 To start a new repository, or to add LFS files to an existing one, just
29 29 create an ``.hglfs`` file as described below in the root directory of
30 30 the repository. Typically, this file should be put under version
31 31 control, so that the settings will propagate to other repositories with
32 32 push and pull. During any commit, Mercurial will consult this file to
33 33 determine if an added or modified file should be stored externally. The
34 34 type of storage depends on the characteristics of the file at each
35 35 commit. A file that is near a size threshold may switch back and forth
36 36 between LFS and normal storage, as needed.
37 37
38 38 Alternately, both normal repositories and largefile controlled
39 39 repositories can be converted to LFS by using :hg:`convert` and the
40 40 ``lfs.track`` config option described below. The ``.hglfs`` file
41 41 should then be created and added, to control subsequent LFS selection.
42 42 The hashes are also unchanged in this case. The LFS and non-LFS
43 43 repositories can be distinguished because the LFS repository will
44 44 abort any command if this extension is disabled.
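
For example, converting a normal repository to LFS might look like this
(the size threshold and repository paths are illustrative only)::

  $ hg --config extensions.lfs= --config 'lfs.track=size(">10MB")' \
       convert normal-repo lfs-repo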
45 45
46 46 Committed LFS files are held locally, until the repository is pushed.
47 47 Prior to pushing the normal repository data, the LFS files that are
48 48 tracked by the outgoing commits are automatically uploaded to the
49 49 configured central server. No LFS files are transferred on
50 50 :hg:`pull` or :hg:`clone`. Instead, the files are downloaded on
51 51 demand as they need to be read, if a cached copy cannot be found
52 52 locally. Both committing and downloading an LFS file will link the
53 53 file to a usercache, to speed up future access. See the `usercache`
54 54 config setting described below.
55 55
56 56 The extension reads its configuration from a versioned ``.hglfs``
57 57 configuration file found in the root of the working directory. The
58 58 ``.hglfs`` file uses the same syntax as all other Mercurial
59 59 configuration files. It uses a single section, ``[track]``.
60 60
61 61 The ``[track]`` section specifies which files are stored as LFS (or
62 62 not). Each line is keyed by a file pattern, with a predicate value.
63 63 The first file pattern match is used, so put more specific patterns
64 64 first. The available predicates are ``all()``, ``none()``, and
65 65 ``size()``. See "hg help filesets.size" for the latter.
66 66
67 67 Example versioned ``.hglfs`` file::
68 68
69 69 [track]
70 70 # No Makefile or python file, anywhere, will be LFS
71 71 **Makefile = none()
72 72 **.py = none()
73 73
74 74 **.zip = all()
75 75 **.exe = size(">1MB")
76 76
77 77 # Catchall for everything not matched above
78 78 ** = size(">10MB")
79 79
80 80 Configs::
81 81
82 82 [lfs]
83 83 # Remote endpoint. Multiple protocols are supported:
84 84 # - http(s)://user:pass@example.com/path
85 85 # git-lfs endpoint
86 86 # - file:///tmp/path
87 87 # local filesystem, usually for testing
88 88 # if unset, lfs will assume the remote repository also handles blob storage
89 89 # for http(s) URLs. Otherwise, lfs will prompt to set this when it must
90 90 # use this value.
91 91 # (default: unset)
92 92 url = https://example.com/repo.git/info/lfs
93 93
94 94 # Which files to track in LFS. Path tests are "**.extname" for file
95 95 # extensions, and "path:under/some/directory" for path prefix. Both
96 96 # are relative to the repository root.
97 97 # File size can be tested with the "size()" fileset, and tests can be
98 98 # joined with fileset operators. (See "hg help filesets.operators".)
99 99 #
100 100 # Some examples:
101 101 # - all() # everything
102 102 # - none() # nothing
103 103 # - size(">20MB") # larger than 20MB
104 104 # - !**.txt # anything not a *.txt file
105 105 # - **.zip | **.tar.gz | **.7z # some types of compressed files
106 106 # - path:bin # files under "bin" in the project root
107 107 # - (**.php & size(">2MB")) | (**.js & size(">5MB")) | **.tar.gz
108 108 # | (path:bin & !path:/bin/README) | size(">1GB")
109 109 # (default: none())
110 110 #
111 111 # This is ignored if there is a tracked '.hglfs' file, and this setting
112 112 # will eventually be deprecated and removed.
113 113 track = size(">10M")
114 114
115 115 # how many times to retry before giving up on transferring an object
116 116 retry = 5
117 117
118 118 # the local directory to store lfs files for sharing across local clones.
119 119 # If not set, the cache is located in an OS specific cache location.
120 120 usercache = /path/to/global/cache
121 121 """
122 122
123 123 from __future__ import absolute_import
124 124
125 125 import sys
126 126
127 127 from mercurial.i18n import _
128 128 from mercurial.node import bin
129 129
130 130 from mercurial import (
131 131 bundlecaches,
132 132 config,
133 133 context,
134 134 error,
135 135 extensions,
136 136 exthelper,
137 137 filelog,
138 138 filesetlang,
139 139 localrepo,
140 140 logcmdutil,
141 141 minifileset,
142 142 pycompat,
143 143 revlog,
144 144 scmutil,
145 145 templateutil,
146 146 util,
147 147 )
148 148
149 149 from mercurial.interfaces import repository
150 150
151 151 from . import (
152 152 blobstore,
153 153 wireprotolfsserver,
154 154 wrapper,
155 155 )
156 156
157 157 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
158 158 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
159 159 # be specifying the version(s) of Mercurial they are tested with, or
160 160 # leave the attribute unspecified.
161 161 testedwith = b'ships-with-hg-core'
162 162
163 163 eh = exthelper.exthelper()
164 164 eh.merge(wrapper.eh)
165 165 eh.merge(wireprotolfsserver.eh)
166 166
167 167 cmdtable = eh.cmdtable
168 168 configtable = eh.configtable
169 169 extsetup = eh.finalextsetup
170 170 uisetup = eh.finaluisetup
171 171 filesetpredicate = eh.filesetpredicate
172 172 reposetup = eh.finalreposetup
173 173 templatekeyword = eh.templatekeyword
174 174
175 175 eh.configitem(
176 176 b'experimental',
177 177 b'lfs.serve',
178 178 default=True,
179 179 )
180 180 eh.configitem(
181 181 b'experimental',
182 182 b'lfs.user-agent',
183 183 default=None,
184 184 )
185 185 eh.configitem(
186 186 b'experimental',
187 187 b'lfs.disableusercache',
188 188 default=False,
189 189 )
190 190 eh.configitem(
191 191 b'experimental',
192 192 b'lfs.worker-enable',
193 193 default=True,
194 194 )
195 195
196 196 eh.configitem(
197 197 b'lfs',
198 198 b'url',
199 199 default=None,
200 200 )
201 201 eh.configitem(
202 202 b'lfs',
203 203 b'usercache',
204 204 default=None,
205 205 )
206 206 # Deprecated
207 207 eh.configitem(
208 208 b'lfs',
209 209 b'threshold',
210 210 default=None,
211 211 )
212 212 eh.configitem(
213 213 b'lfs',
214 214 b'track',
215 215 default=b'none()',
216 216 )
217 217 eh.configitem(
218 218 b'lfs',
219 219 b'retry',
220 220 default=5,
221 221 )
222 222
223 223 lfsprocessor = (
224 224 wrapper.readfromstore,
225 225 wrapper.writetostore,
226 226 wrapper.bypasscheckhash,
227 227 )
228 228
229 229
230 230 def featuresetup(ui, supported):
231 231 # don't die on seeing a repo with the lfs requirement
232 232 supported |= {b'lfs'}
233 233
234 234
235 235 @eh.uisetup
236 236 def _uisetup(ui):
237 237 localrepo.featuresetupfuncs.add(featuresetup)
238 238
239 239
240 240 @eh.reposetup
241 241 def _reposetup(ui, repo):
242 242 # Nothing to do with a remote repo
243 243 if not repo.local():
244 244 return
245 245
246 246 repo.svfs.lfslocalblobstore = blobstore.local(repo)
247 247 repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
248 248
249 249 class lfsrepo(repo.__class__):
250 250 @localrepo.unfilteredmethod
251 251 def commitctx(self, ctx, error=False, origctx=None):
252 252 repo.svfs.options[b'lfstrack'] = _trackedmatcher(self)
253 253 return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)
254 254
255 255 repo.__class__ = lfsrepo
256 256
257 257 if b'lfs' not in repo.requirements:
258 258
259 259 def checkrequireslfs(ui, repo, **kwargs):
260 if b'lfs' in repo.requirements:
261 return 0
260 with repo.lock():
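# Re-check under the lock: the requirement may have been added since the
# hook was installed, or by a concurrent transaction.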
261 if b'lfs' in repo.requirements:
262 return 0
262 263
263 last = kwargs.get('node_last')
264 if last:
265 s = repo.set(b'%n:%n', bin(kwargs['node']), bin(last))
266 else:
267 s = repo.set(b'%n', bin(kwargs['node']))
268 match = repo._storenarrowmatch
269 for ctx in s:
270 # TODO: is there a way to just walk the files in the commit?
271 if any(
272 ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
273 ):
274 repo.requirements.add(b'lfs')
275 repo.features.add(repository.REPO_FEATURE_LFS)
276 scmutil.writereporequirements(repo)
277 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
278 break
264 last = kwargs.get('node_last')
265 if last:
266 s = repo.set(b'%n:%n', bin(kwargs['node']), bin(last))
267 else:
268 s = repo.set(b'%n', bin(kwargs['node']))
269 match = repo._storenarrowmatch
270 for ctx in s:
271 # TODO: is there a way to just walk the files in the commit?
272 if any(
273 ctx[f].islfs()
274 for f in ctx.files()
275 if f in ctx and match(f)
276 ):
277 repo.requirements.add(b'lfs')
278 repo.features.add(repository.REPO_FEATURE_LFS)
279 scmutil.writereporequirements(repo)
280 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
281 break
279 282
280 283 ui.setconfig(b'hooks', b'commit.lfs', checkrequireslfs, b'lfs')
281 284 ui.setconfig(
282 285 b'hooks', b'pretxnchangegroup.lfs', checkrequireslfs, b'lfs'
283 286 )
284 287 else:
285 288 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
286 289
287 290
288 291 def _trackedmatcher(repo):
289 292 """Return a function (path, size) -> bool indicating whether or not to
290 293 track a given file with lfs."""
291 294 if not repo.wvfs.exists(b'.hglfs'):
292 295 # No '.hglfs' in wdir. Fallback to config for now.
293 296 trackspec = repo.ui.config(b'lfs', b'track')
294 297
295 298 # deprecated config: lfs.threshold
296 299 threshold = repo.ui.configbytes(b'lfs', b'threshold')
297 300 if threshold:
298 301 filesetlang.parse(trackspec) # make sure syntax errors are confined
299 302 trackspec = b"(%s) | size('>%d')" % (trackspec, threshold)
300 303
301 304 return minifileset.compile(trackspec)
302 305
303 306 data = repo.wvfs.tryread(b'.hglfs')
304 307 if not data:
305 308 return lambda p, s: False
306 309
307 310 # Parse errors here will abort with a message that points to the .hglfs file
308 311 # and line number.
309 312 cfg = config.config()
310 313 cfg.parse(b'.hglfs', data)
311 314
312 315 try:
313 316 rules = [
314 317 (minifileset.compile(pattern), minifileset.compile(rule))
315 318 for pattern, rule in cfg.items(b'track')
316 319 ]
317 320 except error.ParseError as e:
318 321 # The original exception gives no indicator that the error is in the
319 322 # .hglfs file, so add that.
320 323
321 324 # TODO: See if the line number of the file can be made available.
322 325 raise error.Abort(_(b'parse error in .hglfs: %s') % e)
323 326
324 327 def _match(path, size):
325 328 for pat, rule in rules:
326 329 if pat(path, size):
327 330 return rule(path, size)
328 331
329 332 return False
330 333
331 334 return _match
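# Usage sketch (hypothetical rule and path): with a tracked '.hglfs'
# containing "**.zip = all()", _trackedmatcher(repo)(b'dist/a.zip', 123)
# evaluates the predicate of the first matching pattern and returns True.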
332 335
333 336
334 337 # Called by remotefilelog
335 338 def wrapfilelog(filelog):
336 339 wrapfunction = extensions.wrapfunction
337 340
338 341 wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
339 342 wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
340 343 wrapfunction(filelog, 'size', wrapper.filelogsize)
341 344
342 345
343 346 @eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
344 347 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
345 348 opts = orig(ui, requirements, features)
346 349 for name, module in extensions.extensions(ui):
347 350 if module is sys.modules[__name__]:
348 351 if revlog.REVIDX_EXTSTORED in opts[b'flagprocessors']:
349 352 msg = (
350 353 _(b"cannot register multiple processors on flag '%#x'.")
351 354 % revlog.REVIDX_EXTSTORED
352 355 )
353 356 raise error.Abort(msg)
354 357
355 358 opts[b'flagprocessors'][revlog.REVIDX_EXTSTORED] = lfsprocessor
356 359 break
357 360
358 361 return opts
359 362
360 363
361 364 @eh.extsetup
362 365 def _extsetup(ui):
363 366 wrapfilelog(filelog.filelog)
364 367
365 368 context.basefilectx.islfs = wrapper.filectxislfs
366 369
367 370 scmutil.fileprefetchhooks.add(b'lfs', wrapper._prefetchfiles)
368 371
369 372 # Make bundle choose changegroup3 instead of changegroup2. This affects
370 373 # "hg bundle" command. Note: it does not cover all bundle formats like
371 374 # "packed1". Using "packed1" with lfs will likely cause trouble.
372 375 bundlecaches._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
373 376
374 377
375 378 @eh.filesetpredicate(b'lfs()')
376 379 def lfsfileset(mctx, x):
377 380 """File that uses LFS storage."""
378 381 # i18n: "lfs" is a keyword
379 382 filesetlang.getargs(x, 0, 0, _(b"lfs takes no arguments"))
380 383 ctx = mctx.ctx
381 384
382 385 def lfsfilep(f):
383 386 return wrapper.pointerfromctx(ctx, f, removed=True) is not None
384 387
385 388 return mctx.predicate(lfsfilep, predrepr=b'<lfs>')
386 389
387 390
388 391 @eh.templatekeyword(b'lfs_files', requires={b'ctx'})
389 392 def lfsfiles(context, mapping):
390 393 """List of strings. All files modified, added, or removed by this
391 394 changeset."""
392 395 ctx = context.resource(mapping, b'ctx')
393 396
394 397 pointers = wrapper.pointersfromctx(ctx, removed=True) # {path: pointer}
395 398 files = sorted(pointers.keys())
396 399
397 400 def pointer(v):
398 401 # In the file spec, version is first and the other keys are sorted.
399 402 sortkeyfunc = lambda x: (x[0] != b'version', x)
400 403 items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
401 404 return util.sortdict(items)
402 405
403 406 makemap = lambda v: {
404 407 b'file': v,
405 408 b'lfsoid': pointers[v].oid() if pointers[v] else None,
406 409 b'lfspointer': templateutil.hybriddict(pointer(v)),
407 410 }
408 411
409 412 # TODO: make the separator ', '?
410 413 f = templateutil._showcompatlist(context, mapping, b'lfs_file', files)
411 414 return templateutil.hybrid(f, files, makemap, pycompat.identity)
412 415
413 416
414 417 @eh.command(
415 418 b'debuglfsupload',
416 419 [(b'r', b'rev', [], _(b'upload large files introduced by REV'))],
417 420 )
418 421 def debuglfsupload(ui, repo, **opts):
419 422 """upload lfs blobs added by the working copy parent or given revisions"""
420 423 revs = opts.get('rev', [])
421 424 pointers = wrapper.extractpointers(repo, logcmdutil.revrange(repo, revs))
422 425 wrapper.uploadblobs(repo, pointers)
423 426
424 427
425 428 @eh.wrapcommand(
426 429 b'verify',
427 430 opts=[(b'', b'no-lfs', None, _(b'skip missing lfs blob content'))],
428 431 )
429 432 def verify(orig, ui, repo, **opts):
430 433 skipflags = repo.ui.configint(b'verify', b'skipflags')
431 434 no_lfs = opts.pop('no_lfs')
432 435
433 436 if skipflags:
434 437 # --lfs overrides the config bit, if set.
435 438 if no_lfs is False:
436 439 skipflags &= ~repository.REVISION_FLAG_EXTSTORED
437 440 else:
438 441 skipflags = 0
439 442
440 443 if no_lfs is True:
441 444 skipflags |= repository.REVISION_FLAG_EXTSTORED
442 445
443 446 with ui.configoverride({(b'verify', b'skipflags'): skipflags}):
444 447 return orig(ui, repo, **opts)
@@ -1,76 +1,76 b''
1 1 # narrowdirstate.py - extensions to mercurial dirstate to support narrow clones
2 2 #
3 3 # Copyright 2017 Google, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from mercurial.i18n import _
11 11 from mercurial import error
12 12
13 13
14 14 def wrapdirstate(repo, dirstate):
15 15 """Add narrow spec dirstate ignore, block changes outside narrow spec."""
16 16
17 17 def _editfunc(fn):
18 18 def _wrapper(self, *args, **kwargs):
19 19 narrowmatch = repo.narrowmatch()
20 20 for f in args:
21 21 if f is not None and not narrowmatch(f) and f not in self:
22 22 raise error.Abort(
23 23 _(
24 24 b"cannot track '%s' - it is outside "
25 25 + b"the narrow clone"
26 26 )
27 27 % f
28 28 )
29 29 return fn(self, *args, **kwargs)
30 30
31 31 return _wrapper
32 32
33 33 class narrowdirstate(dirstate.__class__):
34 34 # Prevent adding/editing/copying/deleting files that are outside the
35 35 # narrow clone
36 36 @_editfunc
37 37 def normal(self, *args, **kwargs):
38 38 return super(narrowdirstate, self).normal(*args, **kwargs)
39 39
40 40 @_editfunc
41 def set_tracked(self, *args):
42 return super(narrowdirstate, self).set_tracked(*args)
41 def set_tracked(self, *args, **kwargs):
42 return super(narrowdirstate, self).set_tracked(*args, **kwargs)
43 43
44 44 @_editfunc
45 45 def set_untracked(self, *args):
46 46 return super(narrowdirstate, self).set_untracked(*args)
47 47
48 48 @_editfunc
49 49 def add(self, *args):
50 50 return super(narrowdirstate, self).add(*args)
51 51
52 52 @_editfunc
53 53 def normallookup(self, *args):
54 54 return super(narrowdirstate, self).normallookup(*args)
55 55
56 56 @_editfunc
57 57 def copy(self, *args):
58 58 return super(narrowdirstate, self).copy(*args)
59 59
60 60 @_editfunc
61 61 def remove(self, *args):
62 62 return super(narrowdirstate, self).remove(*args)
63 63
64 64 @_editfunc
65 65 def merge(self, *args):
66 66 return super(narrowdirstate, self).merge(*args)
67 67
68 68 def rebuild(self, parent, allfiles, changedfiles=None):
69 69 if changedfiles is None:
70 70 # Rebuilding entire dirstate, let's filter allfiles to match the
71 71 # narrowspec.
72 72 allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
73 73 super(narrowdirstate, self).rebuild(parent, allfiles, changedfiles)
74 74
75 75 dirstate.__class__ = narrowdirstate
76 76 return dirstate
@@ -1,656 +1,659 b''
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''hooks for sending email push notifications
9 9
10 10 This extension implements hooks to send email notifications when
11 11 changesets are sent from or received by the local repository.
12 12
13 13 First, enable the extension as explained in :hg:`help extensions`, and
14 14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 15 are run when changesets are received, while ``outgoing`` hooks are for
16 16 changesets sent to another repository::
17 17
18 18 [hooks]
19 19 # one email for each incoming changeset
20 20 incoming.notify = python:hgext.notify.hook
21 21 # one email for all incoming changesets
22 22 changegroup.notify = python:hgext.notify.hook
23 23
24 24 # one email for all outgoing changesets
25 25 outgoing.notify = python:hgext.notify.hook
26 26
27 27 This registers the hooks. To enable notification, subscribers must
28 28 be assigned to repositories. The ``[usersubs]`` section maps multiple
29 29 repositories to a given recipient. The ``[reposubs]`` section maps
30 30 multiple recipients to a single repository::
31 31
32 32 [usersubs]
33 33 # key is subscriber email, value is a comma-separated list of repo patterns
34 34 user@host = pattern
35 35
36 36 [reposubs]
37 37 # key is repo pattern, value is a comma-separated list of subscriber emails
38 38 pattern = user@host
39 39
40 40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
41 41 optionally combined with a revset expression. A revset expression, if
42 42 present, is separated from the glob by a hash. Example::
43 43
44 44 [reposubs]
45 45 */widgets#branch(release) = qa-team@example.com
46 46
47 47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
48 48 branch triggers a notification in any repository ending in ``widgets``.
49 49
50 50 In order to place them under direct user management, ``[usersubs]`` and
51 51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
52 52 incorporated by reference::
53 53
54 54 [notify]
55 55 config = /path/to/subscriptionsfile
56 56
57 57 Notifications will not be sent until the ``notify.test`` value is set
58 58 to ``False``; see below.
59 59
60 60 Notification content can be tweaked with the following configuration entries:
61 61
62 62 notify.test
63 63 If ``True``, print messages to stdout instead of sending them. Default: True.
64 64
65 65 notify.sources
66 66 Space-separated list of change sources. Notifications are activated only
67 67 when a changeset's source is in this list. Sources may be:
68 68
69 69 :``serve``: changesets received via http or ssh
70 70 :``pull``: changesets received via ``hg pull``
71 71 :``unbundle``: changesets received via ``hg unbundle``
72 72 :``push``: changesets sent or received via ``hg push``
73 73 :``bundle``: changesets sent via ``hg bundle``
74 74
75 75 Default: serve.
76 76
77 77 notify.strip
78 78 Number of leading slashes to strip from URL paths. By default, notifications
79 79 reference repositories with their absolute path. ``notify.strip`` lets you
80 80 turn them into relative paths. For example, ``notify.strip=3`` will change
81 81 ``/long/path/repository`` into ``repository``. Default: 0.
82 82
83 83 notify.domain
84 84 Default email domain for sender or recipients with no explicit domain.
85 85 It is also used for the domain part of the ``Message-Id`` when using
86 86 ``notify.messageidseed``.
87 87
88 88 notify.messageidseed
89 89 Create deterministic ``Message-Id`` headers for the mails based on the seed
90 90 and the revision identifier of the first commit in the changeset.
91 91
92 92 notify.style
93 93 Style file to use when formatting emails.
94 94
95 95 notify.template
96 96 Template to use when formatting emails.
97 97
98 98 notify.incoming
99 99 Template to use when run as an incoming hook, overriding ``notify.template``.
100 100
101 101 notify.outgoing
102 102 Template to use when run as an outgoing hook, overriding ``notify.template``.
103 103
104 104 notify.changegroup
105 105 Template to use when running as a changegroup hook, overriding
106 106 ``notify.template``.
107 107
108 108 notify.maxdiff
109 109 Maximum number of diff lines to include in notification email. Set to 0
110 110 to disable the diff, or -1 to include all of it. Default: 300.
111 111
112 112 notify.maxdiffstat
113 113 Maximum number of diffstat lines to include in notification email. Set to -1
114 114 to include all of it. Default: -1.
115 115
116 116 notify.maxsubject
117 117 Maximum number of characters in the email's subject line. Default: 67.
118 118
119 119 notify.diffstat
120 120 Set to True to include a diffstat before diff content. Default: True.
121 121
122 122 notify.showfunc
123 123 If set, override ``diff.showfunc`` for the diff content. Default: None.
124 124
125 125 notify.merge
126 126 If True, send notifications for merge changesets. Default: True.
127 127
128 128 notify.mbox
129 129 If set, append mails to this mbox file instead of sending. Default: None.
130 130
131 131 notify.fromauthor
132 132 If set, use the committer of the first changeset in a changegroup for
133 133 the "From" field of the notification mail. If not set, take the user
134 134 from the pushing repo. Default: False.
135 135
136 136 notify.reply-to-predecessor (EXPERIMENTAL)
137 137 If set and the changeset has a predecessor in the repository, try to thread
138 138 the notification mail with the predecessor. This adds the "In-Reply-To" header
139 139 to the notification mail with a reference to the predecessor with the smallest
140 140 revision number. Mail threads can still be torn, especially when changesets
141 141 are folded.
142 142
143 143 This option must be used in combination with ``notify.messageidseed``.
144 144
145 145 If set, the following entries will also be used to customize the
146 146 notifications:
147 147
148 148 email.from
149 149 Email ``From`` address to use if none can be found in the generated
150 150 email content.
151 151
152 152 web.baseurl
153 153 Root repository URL to combine with repository paths when making
154 154 references. See also ``notify.strip``.
155 155
156 156 '''
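# Illustrative minimal configuration implied by the docstring above
# (values are examples, not defaults; ``notify.test = False`` is what
# actually enables delivery):
#
#   [extensions]
#   notify =
#
#   [hooks]
#   changegroup.notify = python:hgext.notify.hook
#
#   [reposubs]
#   */widgets#branch(release) = qa-team@example.com
#
#   [notify]
#   test = False
#   sources = serve push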
157 157 from __future__ import absolute_import
158 158
159 159 import email.errors as emailerrors
160 160 import email.utils as emailutils
161 161 import fnmatch
162 162 import hashlib
163 163 import socket
164 164 import time
165 165
166 166 from mercurial.i18n import _
167 167 from mercurial import (
168 168 encoding,
169 169 error,
170 170 logcmdutil,
171 171 mail,
172 172 obsutil,
173 173 patch,
174 174 pycompat,
175 175 registrar,
176 176 util,
177 177 )
178 178 from mercurial.utils import (
179 179 dateutil,
180 180 stringutil,
181 181 )
182 182
183 183 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
184 184 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
185 185 # be specifying the version(s) of Mercurial they are tested with, or
186 186 # leave the attribute unspecified.
187 187 testedwith = b'ships-with-hg-core'
188 188
189 189 configtable = {}
190 190 configitem = registrar.configitem(configtable)
191 191
192 192 configitem(
193 193 b'notify',
194 194 b'changegroup',
195 195 default=None,
196 196 )
197 197 configitem(
198 198 b'notify',
199 199 b'config',
200 200 default=None,
201 201 )
202 202 configitem(
203 203 b'notify',
204 204 b'diffstat',
205 205 default=True,
206 206 )
207 207 configitem(
208 208 b'notify',
209 209 b'domain',
210 210 default=None,
211 211 )
212 212 configitem(
213 213 b'notify',
214 214 b'messageidseed',
215 215 default=None,
216 216 )
217 217 configitem(
218 218 b'notify',
219 219 b'fromauthor',
220 220 default=None,
221 221 )
222 222 configitem(
223 223 b'notify',
224 224 b'incoming',
225 225 default=None,
226 226 )
227 227 configitem(
228 228 b'notify',
229 229 b'maxdiff',
230 230 default=300,
231 231 )
232 232 configitem(
233 233 b'notify',
234 234 b'maxdiffstat',
235 235 default=-1,
236 236 )
237 237 configitem(
238 238 b'notify',
239 239 b'maxsubject',
240 240 default=67,
241 241 )
242 242 configitem(
243 243 b'notify',
244 244 b'mbox',
245 245 default=None,
246 246 )
247 247 configitem(
248 248 b'notify',
249 249 b'merge',
250 250 default=True,
251 251 )
252 252 configitem(
253 253 b'notify',
254 254 b'outgoing',
255 255 default=None,
256 256 )
257 257 configitem(
258 258 b'notify',
259 259 b'reply-to-predecessor',
260 260 default=False,
261 261 )
262 262 configitem(
263 263 b'notify',
264 264 b'sources',
265 265 default=b'serve',
266 266 )
267 267 configitem(
268 268 b'notify',
269 269 b'showfunc',
270 270 default=None,
271 271 )
272 272 configitem(
273 273 b'notify',
274 274 b'strip',
275 275 default=0,
276 276 )
277 277 configitem(
278 278 b'notify',
279 279 b'style',
280 280 default=None,
281 281 )
282 282 configitem(
283 283 b'notify',
284 284 b'template',
285 285 default=None,
286 286 )
287 287 configitem(
288 288 b'notify',
289 289 b'test',
290 290 default=True,
291 291 )
292 292
293 293 # the template for a single changeset can include email headers.
294 294 single_template = b'''
295 295 Subject: changeset in {webroot}: {desc|firstline|strip}
296 296 From: {author}
297 297
298 298 changeset {node|short} in {root}
299 299 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
300 300 description:
301 301 \t{desc|tabindent|strip}
302 302 '''.lstrip()
303 303
304 304 # the template for multiple changesets should not contain email headers,
305 305 # because only the first set of headers will be used and the result
306 306 # will look strange.
307 307 multiple_template = b'''
308 308 changeset {node|short} in {root}
309 309 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
310 310 summary: {desc|firstline}
311 311 '''
312 312
313 313 deftemplates = {
314 314 b'changegroup': multiple_template,
315 315 }
316 316
317 317
318 318 class notifier(object):
319 319 '''email notification class.'''
320 320
321 321 def __init__(self, ui, repo, hooktype):
322 322 self.ui = ui
323 323 cfg = self.ui.config(b'notify', b'config')
324 324 if cfg:
325 325 self.ui.readconfig(cfg, sections=[b'usersubs', b'reposubs'])
326 326 self.repo = repo
327 327 self.stripcount = int(self.ui.config(b'notify', b'strip'))
328 328 self.root = self.strip(self.repo.root)
329 329 self.domain = self.ui.config(b'notify', b'domain')
330 330 self.mbox = self.ui.config(b'notify', b'mbox')
331 331 self.test = self.ui.configbool(b'notify', b'test')
332 332 self.charsets = mail._charsets(self.ui)
333 333 self.subs = self.subscribers()
334 334 self.merge = self.ui.configbool(b'notify', b'merge')
335 335 self.showfunc = self.ui.configbool(b'notify', b'showfunc')
336 336 self.messageidseed = self.ui.config(b'notify', b'messageidseed')
337 337 self.reply = self.ui.configbool(b'notify', b'reply-to-predecessor')
338 338
339 339 if self.reply and not self.messageidseed:
340 340 raise error.Abort(
341 341 _(
342 342 b'notify.reply-to-predecessor used without '
343 343 b'notify.messageidseed'
344 344 )
345 345 )
346 346
347 347 if self.showfunc is None:
348 348 self.showfunc = self.ui.configbool(b'diff', b'showfunc')
349 349
350 350 mapfile = None
351 351 template = self.ui.config(b'notify', hooktype) or self.ui.config(
352 352 b'notify', b'template'
353 353 )
354 354 if not template:
355 355 mapfile = self.ui.config(b'notify', b'style')
356 356 if not mapfile and not template:
357 357 template = deftemplates.get(hooktype) or single_template
358 358 spec = logcmdutil.templatespec(template, mapfile)
359 359 self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
360 360
361 361 def strip(self, path):
362 362 '''strip leading path components from local path, turn into web-safe path.'''
363 363
364 364 path = util.pconvert(path)
365 365 count = self.stripcount
366 366 while count > 0:
367 367 c = path.find(b'/')
368 368 if c == -1:
369 369 break
370 370 path = path[c + 1 :]
371 371 count -= 1
372 372 return path
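# Illustrative behavior (not part of the original source): with
# stripcount=3, strip(b'/long/path/repository') returns b'repository';
# with stripcount=0 the path is returned unchanged apart from
# util.pconvert()'s backslash-to-slash normalization.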
373 373
374 374 def fixmail(self, addr):
375 375 '''try to clean up email addresses.'''
376 376
377 377 addr = stringutil.email(addr.strip())
378 378 if self.domain:
379 379 a = addr.find(b'@localhost')
380 380 if a != -1:
381 381 addr = addr[:a]
382 382 if b'@' not in addr:
383 383 return addr + b'@' + self.domain
384 384 return addr
385 385
386 386 def subscribers(self):
387 387 '''return list of email addresses of subscribers to this repo.'''
388 388 subs = set()
389 389 for user, pats in self.ui.configitems(b'usersubs'):
390 390 for pat in pats.split(b','):
391 391 if b'#' in pat:
392 392 pat, revs = pat.split(b'#', 1)
393 393 else:
394 394 revs = None
395 395 if fnmatch.fnmatch(self.repo.root, pat.strip()):
396 396 subs.add((self.fixmail(user), revs))
397 397 for pat, users in self.ui.configitems(b'reposubs'):
398 398 if b'#' in pat:
399 399 pat, revs = pat.split(b'#', 1)
400 400 else:
401 401 revs = None
402 402 if fnmatch.fnmatch(self.repo.root, pat):
403 403 for user in users.split(b','):
404 404 subs.add((self.fixmail(user), revs))
405 405 return [
406 406 (mail.addressencode(self.ui, s, self.charsets, self.test), r)
407 407 for s, r in sorted(subs)
408 408 ]
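# Worked example (hypothetical config, not part of the original source):
# given
#   [usersubs]
#   qa@example.com = */widgets#branch(release)
# each value is split on ',', each pattern on '#'; the glob '*/widgets'
# is matched against repo.root with fnmatch, and 'branch(release)' is
# kept as the revset that send() evaluates per changeset.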
409 409
410 410 def node(self, ctx, **props):
411 411 '''format one changeset, unless it is a suppressed merge.'''
412 412 if not self.merge and len(ctx.parents()) > 1:
413 413 return False
414 414 self.t.show(
415 415 ctx,
416 416 changes=ctx.changeset(),
417 417 baseurl=self.ui.config(b'web', b'baseurl'),
418 418 root=self.repo.root,
419 419 webroot=self.root,
420 420 **props
421 421 )
422 422 return True
423 423
424 424 def skipsource(self, source):
425 425 '''true if incoming changes from this source should be skipped.'''
426 426 ok_sources = self.ui.config(b'notify', b'sources').split()
427 427 return source not in ok_sources
428 428
429 429 def send(self, ctx, count, data):
430 430 '''send message.'''
431 431
432 432 # Select subscribers by revset
433 433 subs = set()
434 434 for sub, spec in self.subs:
435 435 if spec is None:
436 436 subs.add(sub)
437 437 continue
438 revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
438 try:
439 revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
440 except error.RepoLookupError:
441 continue
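# Note on the hunk above: the subscription revset may reference names
# unknown to this repository (e.g. a branch that only exists elsewhere),
# so a RepoLookupError is treated as "no match" for that subscriber
# rather than aborting the whole notification hook.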
439 442 if len(revs):
440 443 subs.add(sub)
441 444 continue
442 445 if len(subs) == 0:
443 446 self.ui.debug(
444 447 b'notify: no subscribers to selected repo and revset\n'
445 448 )
446 449 return
447 450
448 451 try:
449 452 msg = mail.parsebytes(data)
450 453 except emailerrors.MessageParseError as inst:
451 454 raise error.Abort(inst)
452 455
453 456 # store sender and subject
454 457 sender = msg['From']
455 458 subject = msg['Subject']
456 459 if sender is not None:
457 460 sender = mail.headdecode(sender)
458 461 if subject is not None:
459 462 subject = mail.headdecode(subject)
460 463 del msg['From'], msg['Subject']
461 464
462 465 if not msg.is_multipart():
463 466 # create fresh mime message from scratch
464 467 # (multipart templates must take care of this themselves)
465 468 headers = msg.items()
466 469 payload = msg.get_payload(decode=pycompat.ispy3)
467 470 # for notification prefer readability over data precision
468 471 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
469 472 # reinstate custom headers
470 473 for k, v in headers:
471 474 msg[k] = v
472 475
473 476 msg['Date'] = encoding.strfromlocal(
474 477 dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2")
475 478 )
476 479
477 480 # try to make subject line exist and be useful
478 481 if not subject:
479 482 if count > 1:
480 483 subject = _(b'%s: %d new changesets') % (self.root, count)
481 484 else:
482 485 s = ctx.description().lstrip().split(b'\n', 1)[0].rstrip()
483 486 subject = b'%s: %s' % (self.root, s)
484 487 maxsubject = int(self.ui.config(b'notify', b'maxsubject'))
485 488 if maxsubject:
486 489 subject = stringutil.ellipsis(subject, maxsubject)
487 490 msg['Subject'] = mail.headencode(
488 491 self.ui, subject, self.charsets, self.test
489 492 )
490 493
491 494 # try to make message have proper sender
492 495 if not sender:
493 496 sender = self.ui.config(b'email', b'from') or self.ui.username()
494 497 if b'@' not in sender or b'@localhost' in sender:
495 498 sender = self.fixmail(sender)
496 499 msg['From'] = mail.addressencode(
497 500 self.ui, sender, self.charsets, self.test
498 501 )
499 502
500 503 msg['X-Hg-Notification'] = 'changeset %s' % ctx
501 504 if not msg['Message-Id']:
502 505 msg['Message-Id'] = messageid(ctx, self.domain, self.messageidseed)
503 506 if self.reply:
504 507 unfi = self.repo.unfiltered()
505 508 has_node = unfi.changelog.index.has_node
506 509 predecessors = [
507 510 unfi[ctx2]
508 511 for ctx2 in obsutil.allpredecessors(unfi.obsstore, [ctx.node()])
509 512 if ctx2 != ctx.node() and has_node(ctx2)
510 513 ]
511 514 if predecessors:
512 515 # There is at least one predecessor, so which to pick?
513 516 # Ideally, there is a unique root because changesets have
514 517 # been evolved/rebased one step at a time. In this case,
515 518 # just picking the oldest known changeset provides a stable
516 519 # base. It doesn't help when changesets are folded. Any
517 520 # better solution would require storing more information
518 521 # in the repository.
519 522 pred = min(predecessors, key=lambda ctx: ctx.rev())
520 523 msg['In-Reply-To'] = messageid(
521 524 pred, self.domain, self.messageidseed
522 525 )
523 526 msg['To'] = ', '.join(sorted(subs))
524 527
525 528 msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
526 529 if self.test:
527 530 self.ui.write(msgtext)
528 531 if not msgtext.endswith(b'\n'):
529 532 self.ui.write(b'\n')
530 533 else:
531 534 self.ui.status(
532 535 _(b'notify: sending %d subscribers %d changes\n')
533 536 % (len(subs), count)
534 537 )
535 538 mail.sendmail(
536 539 self.ui,
537 540 emailutils.parseaddr(msg['From'])[1],
538 541 subs,
539 542 msgtext,
540 543 mbox=self.mbox,
541 544 )
542 545
543 546 def diff(self, ctx, ref=None):
544 547
545 548 maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
546 549 prev = ctx.p1().node()
547 550 if ref:
548 551 ref = ref.node()
549 552 else:
550 553 ref = ctx.node()
551 554 diffopts = patch.diffallopts(self.ui)
552 555 diffopts.showfunc = self.showfunc
553 556 chunks = patch.diff(self.repo, prev, ref, opts=diffopts)
554 557 difflines = b''.join(chunks).splitlines()
555 558
556 559 if self.ui.configbool(b'notify', b'diffstat'):
557 560 maxdiffstat = int(self.ui.config(b'notify', b'maxdiffstat'))
558 561 s = patch.diffstat(difflines)
559 562 # s may be empty; don't include the header if it is
560 563 if s:
561 564 if maxdiffstat >= 0 and s.count(b"\n") > maxdiffstat + 1:
562 565 s = s.split(b"\n")
563 566 msg = _(b'\ndiffstat (truncated from %d to %d lines):\n\n')
564 567 self.ui.write(msg % (len(s) - 2, maxdiffstat))
565 568 self.ui.write(b"\n".join(s[:maxdiffstat] + s[-2:]))
566 569 else:
567 570 self.ui.write(_(b'\ndiffstat:\n\n%s') % s)
568 571
569 572 if maxdiff == 0:
570 573 return
571 574 elif maxdiff > 0 and len(difflines) > maxdiff:
572 575 msg = _(b'\ndiffs (truncated from %d to %d lines):\n\n')
573 576 self.ui.write(msg % (len(difflines), maxdiff))
574 577 difflines = difflines[:maxdiff]
575 578 elif difflines:
576 579 self.ui.write(_(b'\ndiffs (%d lines):\n\n') % len(difflines))
577 580
578 581 self.ui.write(b"\n".join(difflines))
579 582
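# Worked example (not part of the original source): with maxdiff=300 and
# a 450-line diff, the header reads "diffs (truncated from 450 to 300
# lines)" and only the first 300 lines are written; maxdiff=0 suppresses
# the diff entirely, while maxdiff=-1 writes all of it.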
580 583
581 584 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
582 585 """send email notifications to interested subscribers.
583 586
584 587 If used as a changegroup hook, send one email for all changesets in
585 588 the changegroup; otherwise send one email per changeset.
586 589
587 590 n = notifier(ui, repo, hooktype)
588 591 ctx = repo.unfiltered()[node]
589 592
590 593 if not n.subs:
591 594 ui.debug(b'notify: no subscribers to repository %s\n' % n.root)
592 595 return
593 596 if n.skipsource(source):
594 597 ui.debug(b'notify: changes have source "%s" - skipping\n' % source)
595 598 return
596 599
597 600 ui.pushbuffer()
598 601 data = b''
599 602 count = 0
600 603 author = b''
601 604 if hooktype == b'changegroup' or hooktype == b'outgoing':
602 605 for rev in repo.changelog.revs(start=ctx.rev()):
603 606 if n.node(repo[rev]):
604 607 count += 1
605 608 if not author:
606 609 author = repo[rev].user()
607 610 else:
608 611 data += ui.popbuffer()
609 612 ui.note(
610 613 _(b'notify: suppressing notification for merge %d:%s\n')
611 614 % (rev, repo[rev].hex()[:12])
612 615 )
613 616 ui.pushbuffer()
614 617 if count:
615 618 n.diff(ctx, repo[b'tip'])
616 619 elif ctx.rev() in repo:
617 620 if not n.node(ctx):
618 621 ui.popbuffer()
619 622 ui.note(
620 623 _(b'notify: suppressing notification for merge %d:%s\n')
621 624 % (ctx.rev(), ctx.hex()[:12])
622 625 )
623 626 return
624 627 count += 1
625 628 n.diff(ctx)
626 629 if not author:
627 630 author = ctx.user()
628 631
629 632 data += ui.popbuffer()
630 633 fromauthor = ui.config(b'notify', b'fromauthor')
631 634 if author and fromauthor:
632 635 data = b'\n'.join([b'From: %s' % author, data])
633 636
634 637 if count:
635 638 n.send(ctx, count, data)
636 639
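# Clarifying note (not part of the original source): for changegroup and
# outgoing hooks, `node` is the first changeset of the group, so
# repo.changelog.revs(start=ctx.rev()) walks every revision from it to
# tip; each non-suppressed changeset is rendered into the buffered
# `data`, and a single email covers the whole group.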
637 640
638 641 def messageid(ctx, domain, messageidseed):
639 642 if domain and messageidseed:
640 643 host = domain
641 644 else:
642 645 host = encoding.strtolocal(socket.getfqdn())
643 646 if messageidseed:
644 647 messagehash = hashlib.sha512(ctx.hex() + messageidseed)
645 648 messageid = b'<hg.%s@%s>' % (
646 649 pycompat.sysbytes(messagehash.hexdigest()[:64]),
647 650 host,
648 651 )
649 652 else:
650 653 messageid = b'<hg.%s.%d.%d@%s>' % (
651 654 ctx,
652 655 int(time.time()),
653 656 hash(ctx.repo().root),
654 657 host,
655 658 )
656 659 return encoding.strfromlocal(messageid)
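The deterministic branch of `messageid` above can be reproduced outside Mercurial. A self-contained sketch (hypothetical helper name, Python 3):

    import hashlib

    def deterministic_messageid(hexnode: bytes, seed: bytes, host: str) -> str:
        # sha512 over the changeset hash plus the configured seed,
        # truncated to 64 hex digits, mirrors the format used above
        digest = hashlib.sha512(hexnode + seed).hexdigest()[:64]
        return '<hg.%s@%s>' % (digest, host)

    # e.g. deterministic_messageid(b'a' * 40, b'myseed', 'example.com')

Because the same inputs always yield the same Message-Id, ``notify.reply-to-predecessor`` can recompute a predecessor's Message-Id for the In-Reply-To header without storing it anywhere.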
NO CONTENT: file renamed from tests/badserverext.py to tests/testlib/badserverext.py (diff truncated)