merge with stable
Augie Fackler
r44858:f5c00662 merge default

This diff adds one new file of 3,411 lines; the listing below is truncated.
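The new file is a recorded Conduit API session: a JSON cassette whose "interactions" array pairs each form-encoded POST sent to phab.mercurial-scm.org (diffusion.repository.search, file.allocate, file.upload, differential.creatediff, differential.setdiffproperty, differential.parsecommitmessage, differential.revision.edit) with the JSON response the server returned. Each request "body" carries a "params" field that is a URL-encoded JSON blob holding the Conduit token and the call arguments. As a reading aid, a minimal Python sketch for decoding the recorded request bodies; the local filename is illustrative and not part of this change:

    import json
    from urllib.parse import parse_qs

    # Illustrative filename; save the cassette JSON below under any name.
    with open("phabsend-cassette.json") as fh:
        cassette = json.load(fh)

    for interaction in cassette["interactions"]:
        req = interaction["request"]
        # The body is application/x-www-form-urlencoded; "params" is itself
        # URL-encoded JSON containing the token and the call arguments.
        params = json.loads(parse_qs(req["body"])["params"][0])
        print(req["method"], req["uri"], sorted(k for k in params if k != "__conduit__"))

Running this over the cassette prints one line per recorded call with the endpoint and its argument names, which mirrors the sequence of request/response pairs shown below.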
@@ -0,0 +1,3411 @@
1 {
2 "version": 1,
3 "interactions": [
4 {
5 "response": {
6 "headers": {
7 "date": [
8 "Tue, 18 Feb 2020 18:43:11 GMT"
9 ],
10 "strict-transport-security": [
11 "max-age=0; includeSubdomains; preload"
12 ],
13 "x-frame-options": [
14 "Deny"
15 ],
16 "content-type": [
17 "application/json"
18 ],
19 "transfer-encoding": [
20 "chunked"
21 ],
22 "x-xss-protection": [
23 "1; mode=block"
24 ],
25 "expires": [
26 "Sat, 01 Jan 2000 00:00:00 GMT"
27 ],
28 "x-content-type-options": [
29 "nosniff"
30 ],
31 "referrer-policy": [
32 "no-referrer"
33 ],
34 "server": [
35 "Apache/2.4.10 (Debian)"
36 ],
37 "cache-control": [
38 "no-store"
39 ]
40 },
41 "status": {
42 "code": 200,
43 "message": "OK"
44 },
45 "body": {
46 "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
47 }
48 },
49 "request": {
50 "headers": {
51 "content-length": [
52 "183"
53 ],
54 "host": [
55 "phab.mercurial-scm.org"
56 ],
57 "content-type": [
58 "application/x-www-form-urlencoded"
59 ],
60 "user-agent": [
61 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
62 ],
63 "accept": [
64 "application/mercurial-0.1"
65 ]
66 },
67 "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
68 "method": "POST",
69 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D"
70 }
71 },
72 {
73 "response": {
74 "headers": {
75 "date": [
76 "Tue, 18 Feb 2020 18:43:11 GMT"
77 ],
78 "strict-transport-security": [
79 "max-age=0; includeSubdomains; preload"
80 ],
81 "x-frame-options": [
82 "Deny"
83 ],
84 "content-type": [
85 "application/json"
86 ],
87 "transfer-encoding": [
88 "chunked"
89 ],
90 "x-xss-protection": [
91 "1; mode=block"
92 ],
93 "expires": [
94 "Sat, 01 Jan 2000 00:00:00 GMT"
95 ],
96 "x-content-type-options": [
97 "nosniff"
98 ],
99 "referrer-policy": [
100 "no-referrer"
101 ],
102 "server": [
103 "Apache/2.4.10 (Debian)"
104 ],
105 "cache-control": [
106 "no-store"
107 ]
108 },
109 "status": {
110 "code": 200,
111 "message": "OK"
112 },
113 "body": {
114 "string": "{\"result\":{\"upload\":true,\"filePHID\":null},\"error_code\":null,\"error_info\":null}"
115 }
116 },
117 "request": {
118 "headers": {
119 "content-length": [
120 "270"
121 ],
122 "host": [
123 "phab.mercurial-scm.org"
124 ],
125 "content-type": [
126 "application/x-www-form-urlencoded"
127 ],
128 "user-agent": [
129 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
130 ],
131 "accept": [
132 "application/mercurial-0.1"
133 ]
134 },
135 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
136 "method": "POST",
137 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2%22%7D"
138 }
139 },
140 {
141 "response": {
142 "headers": {
143 "date": [
144 "Tue, 18 Feb 2020 18:43:12 GMT"
145 ],
146 "strict-transport-security": [
147 "max-age=0; includeSubdomains; preload"
148 ],
149 "x-frame-options": [
150 "Deny"
151 ],
152 "content-type": [
153 "application/json"
154 ],
155 "transfer-encoding": [
156 "chunked"
157 ],
158 "x-xss-protection": [
159 "1; mode=block"
160 ],
161 "expires": [
162 "Sat, 01 Jan 2000 00:00:00 GMT"
163 ],
164 "x-content-type-options": [
165 "nosniff"
166 ],
167 "referrer-policy": [
168 "no-referrer"
169 ],
170 "server": [
171 "Apache/2.4.10 (Debian)"
172 ],
173 "cache-control": [
174 "no-store"
175 ]
176 },
177 "status": {
178 "code": 200,
179 "message": "OK"
180 },
181 "body": {
182 "string": "{\"result\":\"PHID-FILE-6c77dv6moq5rt5hkiauh\",\"error_code\":null,\"error_info\":null}"
183 }
184 },
185 "request": {
186 "headers": {
187 "content-length": [
188 "184"
189 ],
190 "host": [
191 "phab.mercurial-scm.org"
192 ],
193 "content-type": [
194 "application/x-www-form-urlencoded"
195 ],
196 "user-agent": [
197 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
198 ],
199 "accept": [
200 "application/mercurial-0.1"
201 ]
202 },
203 "uri": "https://phab.mercurial-scm.org//api/file.upload",
204 "method": "POST",
205 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data_base64%22%3A+%22AGM%3D%22%2C+%22name%22%3A+%22bin2%22%7D"
206 }
207 },
208 {
209 "response": {
210 "headers": {
211 "date": [
212 "Tue, 18 Feb 2020 18:43:12 GMT"
213 ],
214 "strict-transport-security": [
215 "max-age=0; includeSubdomains; preload"
216 ],
217 "x-frame-options": [
218 "Deny"
219 ],
220 "content-type": [
221 "application/json"
222 ],
223 "transfer-encoding": [
224 "chunked"
225 ],
226 "x-xss-protection": [
227 "1; mode=block"
228 ],
229 "connection": [
230 "close"
231 ],
232 "expires": [
233 "Sat, 01 Jan 2000 00:00:00 GMT"
234 ],
235 "x-content-type-options": [
236 "nosniff"
237 ],
238 "referrer-policy": [
239 "no-referrer"
240 ],
241 "server": [
242 "Apache/2.4.10 (Debian)"
243 ],
244 "cache-control": [
245 "no-store"
246 ]
247 },
248 "status": {
249 "code": 200,
250 "message": "OK"
251 },
252 "body": {
253 "string": "{\"result\":{\"diffid\":20252,\"phid\":\"PHID-DIFF-haue3qqytoovnmb7orw6\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/20252\\/\"},\"error_code\":null,\"error_info\":null}"
254 }
255 },
256 "request": {
257 "headers": {
258 "content-length": [
259 "1083"
260 ],
261 "host": [
262 "phab.mercurial-scm.org"
263 ],
264 "content-type": [
265 "application/x-www-form-urlencoded"
266 ],
267 "user-agent": [
268 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
269 ],
270 "accept": [
271 "application/mercurial-0.1"
272 ]
273 },
274 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
275 "method": "POST",
276 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22bin2%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+3%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-6c77dv6moq5rt5hkiauh%22%2C+%22new%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%22unix%3Afilemode%22%3A+%22100644%22%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+1%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2275dbbc901145d7beb190197aa232f74540e5a9f3%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D"
277 }
278 },
279 {
280 "response": {
281 "headers": {
282 "date": [
283 "Tue, 18 Feb 2020 18:43:13 GMT"
284 ],
285 "strict-transport-security": [
286 "max-age=0; includeSubdomains; preload"
287 ],
288 "x-frame-options": [
289 "Deny"
290 ],
291 "content-type": [
292 "application/json"
293 ],
294 "transfer-encoding": [
295 "chunked"
296 ],
297 "x-xss-protection": [
298 "1; mode=block"
299 ],
300 "expires": [
301 "Sat, 01 Jan 2000 00:00:00 GMT"
302 ],
303 "x-content-type-options": [
304 "nosniff"
305 ],
306 "referrer-policy": [
307 "no-referrer"
308 ],
309 "server": [
310 "Apache/2.4.10 (Debian)"
311 ],
312 "cache-control": [
313 "no-store"
314 ]
315 },
316 "status": {
317 "code": 200,
318 "message": "OK"
319 },
320 "body": {
321 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
322 }
323 },
324 "request": {
325 "headers": {
326 "content-length": [
327 "482"
328 ],
329 "host": [
330 "phab.mercurial-scm.org"
331 ],
332 "content-type": [
333 "application/x-www-form-urlencoded"
334 ],
335 "user-agent": [
336 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
337 ],
338 "accept": [
339 "application/mercurial-0.1"
340 ]
341 },
342 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
343 "method": "POST",
344 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2275dbbc901145d7beb190197aa232f74540e5a9f3%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20252%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
345 }
346 },
347 {
348 "response": {
349 "headers": {
350 "date": [
351 "Tue, 18 Feb 2020 18:43:13 GMT"
352 ],
353 "strict-transport-security": [
354 "max-age=0; includeSubdomains; preload"
355 ],
356 "x-frame-options": [
357 "Deny"
358 ],
359 "content-type": [
360 "application/json"
361 ],
362 "transfer-encoding": [
363 "chunked"
364 ],
365 "x-xss-protection": [
366 "1; mode=block"
367 ],
368 "expires": [
369 "Sat, 01 Jan 2000 00:00:00 GMT"
370 ],
371 "x-content-type-options": [
372 "nosniff"
373 ],
374 "referrer-policy": [
375 "no-referrer"
376 ],
377 "server": [
378 "Apache/2.4.10 (Debian)"
379 ],
380 "cache-control": [
381 "no-store"
382 ]
383 },
384 "status": {
385 "code": 200,
386 "message": "OK"
387 },
388 "body": {
389 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
390 }
391 },
392 "request": {
393 "headers": {
394 "content-length": [
395 "594"
396 ],
397 "host": [
398 "phab.mercurial-scm.org"
399 ],
400 "content-type": [
401 "application/x-www-form-urlencoded"
402 ],
403 "user-agent": [
404 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
405 ],
406 "accept": [
407 "application/mercurial-0.1"
408 ]
409 },
410 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
411 "method": "POST",
412 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2275dbbc901145d7beb190197aa232f74540e5a9f3%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20252%2C+%22name%22%3A+%22local%3Acommits%22%7D"
413 }
414 },
415 {
416 "response": {
417 "headers": {
418 "date": [
419 "Tue, 18 Feb 2020 18:43:14 GMT"
420 ],
421 "strict-transport-security": [
422 "max-age=0; includeSubdomains; preload"
423 ],
424 "x-frame-options": [
425 "Deny"
426 ],
427 "content-type": [
428 "application/json"
429 ],
430 "transfer-encoding": [
431 "chunked"
432 ],
433 "x-xss-protection": [
434 "1; mode=block"
435 ],
436 "expires": [
437 "Sat, 01 Jan 2000 00:00:00 GMT"
438 ],
439 "x-content-type-options": [
440 "nosniff"
441 ],
442 "referrer-policy": [
443 "no-referrer"
444 ],
445 "server": [
446 "Apache/2.4.10 (Debian)"
447 ],
448 "cache-control": [
449 "no-store"
450 ]
451 },
452 "status": {
453 "code": 200,
454 "message": "OK"
455 },
456 "body": {
457 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"add another binary\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"add another binary\"}]},\"error_code\":null,\"error_info\":null}"
458 }
459 },
460 "request": {
461 "headers": {
462 "content-length": [
463 "163"
464 ],
465 "host": [
466 "phab.mercurial-scm.org"
467 ],
468 "content-type": [
469 "application/x-www-form-urlencoded"
470 ],
471 "user-agent": [
472 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
473 ],
474 "accept": [
475 "application/mercurial-0.1"
476 ]
477 },
478 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
479 "method": "POST",
480 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22add+another+binary%22%7D"
481 }
482 },
483 {
484 "response": {
485 "headers": {
486 "date": [
487 "Tue, 18 Feb 2020 18:43:14 GMT"
488 ],
489 "strict-transport-security": [
490 "max-age=0; includeSubdomains; preload"
491 ],
492 "x-frame-options": [
493 "Deny"
494 ],
495 "content-type": [
496 "application/json"
497 ],
498 "transfer-encoding": [
499 "chunked"
500 ],
501 "x-xss-protection": [
502 "1; mode=block"
503 ],
504 "expires": [
505 "Sat, 01 Jan 2000 00:00:00 GMT"
506 ],
507 "x-content-type-options": [
508 "nosniff"
509 ],
510 "referrer-policy": [
511 "no-referrer"
512 ],
513 "server": [
514 "Apache/2.4.10 (Debian)"
515 ],
516 "cache-control": [
517 "no-store"
518 ]
519 },
520 "status": {
521 "code": 200,
522 "message": "OK"
523 },
524 "body": {
525 "string": "{\"result\":{\"object\":{\"id\":8128,\"phid\":\"PHID-DREV-ebpbxa27h5ibeudclsse\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-m6rt6hdyxzkl6nv\"},{\"phid\":\"PHID-XACT-DREV-degiee33gjgtg23\"},{\"phid\":\"PHID-XACT-DREV-6oz5qcrlwerhhxw\"},{\"phid\":\"PHID-XACT-DREV-pm4rroqrqehsnau\"},{\"phid\":\"PHID-XACT-DREV-ncnb2jnrhtearld\"}]},\"error_code\":null,\"error_info\":null}"
526 }
527 },
528 "request": {
529 "headers": {
530 "content-length": [
531 "316"
532 ],
533 "host": [
534 "phab.mercurial-scm.org"
535 ],
536 "content-type": [
537 "application/x-www-form-urlencoded"
538 ],
539 "user-agent": [
540 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
541 ],
542 "accept": [
543 "application/mercurial-0.1"
544 ]
545 },
546 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
547 "method": "POST",
548 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-haue3qqytoovnmb7orw6%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22add+another+binary%22%7D%5D%7D"
549 }
550 },
551 {
552 "response": {
553 "headers": {
554 "date": [
555 "Tue, 18 Feb 2020 18:43:15 GMT"
556 ],
557 "strict-transport-security": [
558 "max-age=0; includeSubdomains; preload"
559 ],
560 "x-frame-options": [
561 "Deny"
562 ],
563 "content-type": [
564 "application/json"
565 ],
566 "transfer-encoding": [
567 "chunked"
568 ],
569 "x-xss-protection": [
570 "1; mode=block"
571 ],
572 "expires": [
573 "Sat, 01 Jan 2000 00:00:00 GMT"
574 ],
575 "x-content-type-options": [
576 "nosniff"
577 ],
578 "referrer-policy": [
579 "no-referrer"
580 ],
581 "server": [
582 "Apache/2.4.10 (Debian)"
583 ],
584 "cache-control": [
585 "no-store"
586 ]
587 },
588 "status": {
589 "code": 200,
590 "message": "OK"
591 },
592 "body": {
593 "string": "{\"result\":{\"upload\":false,\"filePHID\":\"PHID-FILE-wybozuu6uch7gezkw6pe\"},\"error_code\":null,\"error_info\":null}"
594 }
595 },
596 "request": {
597 "headers": {
598 "content-length": [
599 "276"
600 ],
601 "host": [
602 "phab.mercurial-scm.org"
603 ],
604 "content-type": [
605 "application/x-www-form-urlencoded"
606 ],
607 "user-agent": [
608 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
609 ],
610 "accept": [
611 "application/mercurial-0.1"
612 ]
613 },
614 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
615 "method": "POST",
616 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2_moved%22%7D"
617 }
618 },
619 {
620 "response": {
621 "headers": {
622 "date": [
623 "Tue, 18 Feb 2020 18:43:15 GMT"
624 ],
625 "strict-transport-security": [
626 "max-age=0; includeSubdomains; preload"
627 ],
628 "x-frame-options": [
629 "Deny"
630 ],
631 "content-type": [
632 "application/json"
633 ],
634 "transfer-encoding": [
635 "chunked"
636 ],
637 "x-xss-protection": [
638 "1; mode=block"
639 ],
640 "expires": [
641 "Sat, 01 Jan 2000 00:00:00 GMT"
642 ],
643 "x-content-type-options": [
644 "nosniff"
645 ],
646 "referrer-policy": [
647 "no-referrer"
648 ],
649 "server": [
650 "Apache/2.4.10 (Debian)"
651 ],
652 "cache-control": [
653 "no-store"
654 ]
655 },
656 "status": {
657 "code": 200,
658 "message": "OK"
659 },
660 "body": {
661 "string": "{\"result\":{\"diffid\":20253,\"phid\":\"PHID-DIFF-svria2kxhgr63qwpdzzb\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/20253\\/\"},\"error_code\":null,\"error_info\":null}"
662 }
663 },
664 "request": {
665 "headers": {
666 "content-length": [
667 "1529"
668 ],
669 "host": [
670 "phab.mercurial-scm.org"
671 ],
672 "content-type": [
673 "application/x-www-form-urlencoded"
674 ],
675 "user-agent": [
676 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
677 ],
678 "accept": [
679 "application/mercurial-0.1"
680 ]
681 },
682 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
683 "method": "POST",
684 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22bin2%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%22bin2_moved%22%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+4%7D%2C+%22bin2_moved%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_moved%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-wybozuu6uch7gezkw6pe%22%2C+%22new%3Afile%3Asize%22%3A+2%2C+%22old%3Abinary-phid%22%3A+%22PHID-FILE-wybozuu6uch7gezkw6pe%22%2C+%22old%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+6%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D"
685 }
686 },
687 {
688 "response": {
689 "headers": {
690 "date": [
691 "Tue, 18 Feb 2020 18:43:16 GMT"
692 ],
693 "strict-transport-security": [
694 "max-age=0; includeSubdomains; preload"
695 ],
696 "x-frame-options": [
697 "Deny"
698 ],
699 "content-type": [
700 "application/json"
701 ],
702 "transfer-encoding": [
703 "chunked"
704 ],
705 "x-xss-protection": [
706 "1; mode=block"
707 ],
708 "expires": [
709 "Sat, 01 Jan 2000 00:00:00 GMT"
710 ],
711 "x-content-type-options": [
712 "nosniff"
713 ],
714 "referrer-policy": [
715 "no-referrer"
716 ],
717 "server": [
718 "Apache/2.4.10 (Debian)"
719 ],
720 "cache-control": [
721 "no-store"
722 ]
723 },
724 "status": {
725 "code": 200,
726 "message": "OK"
727 },
728 "body": {
729 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
730 }
731 },
732 "request": {
733 "headers": {
734 "content-length": [
735 "482"
736 ],
737 "host": [
738 "phab.mercurial-scm.org"
739 ],
740 "content-type": [
741 "application/x-www-form-urlencoded"
742 ],
743 "user-agent": [
744 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
745 ],
746 "accept": [
747 "application/mercurial-0.1"
748 ]
749 },
750 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
751 "method": "POST",
752 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22834ab31d80aede3f92435c26366095c3518dc5af%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20253%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
753 }
754 },
755 {
756 "response": {
757 "headers": {
758 "date": [
759 "Tue, 18 Feb 2020 18:43:16 GMT"
760 ],
761 "strict-transport-security": [
762 "max-age=0; includeSubdomains; preload"
763 ],
764 "x-frame-options": [
765 "Deny"
766 ],
767 "content-type": [
768 "application/json"
769 ],
770 "transfer-encoding": [
771 "chunked"
772 ],
773 "x-xss-protection": [
774 "1; mode=block"
775 ],
776 "expires": [
777 "Sat, 01 Jan 2000 00:00:00 GMT"
778 ],
779 "x-content-type-options": [
780 "nosniff"
781 ],
782 "referrer-policy": [
783 "no-referrer"
784 ],
785 "server": [
786 "Apache/2.4.10 (Debian)"
787 ],
788 "cache-control": [
789 "no-store"
790 ]
791 },
792 "status": {
793 "code": 200,
794 "message": "OK"
795 },
796 "body": {
797 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
798 }
799 },
800 "request": {
801 "headers": {
802 "content-length": [
803 "594"
804 ],
805 "host": [
806 "phab.mercurial-scm.org"
807 ],
808 "content-type": [
809 "application/x-www-form-urlencoded"
810 ],
811 "user-agent": [
812 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
813 ],
814 "accept": [
815 "application/mercurial-0.1"
816 ]
817 },
818 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
819 "method": "POST",
820 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22834ab31d80aede3f92435c26366095c3518dc5af%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22834ab31d80aede3f92435c26366095c3518dc5af%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22f42f9195e00ce1b8e9513ea704652a6787b57ce6%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20253%2C+%22name%22%3A+%22local%3Acommits%22%7D"
821 }
822 },
823 {
824 "response": {
825 "headers": {
826 "date": [
827 "Tue, 18 Feb 2020 18:43:17 GMT"
828 ],
829 "strict-transport-security": [
830 "max-age=0; includeSubdomains; preload"
831 ],
832 "x-frame-options": [
833 "Deny"
834 ],
835 "content-type": [
836 "application/json"
837 ],
838 "transfer-encoding": [
839 "chunked"
840 ],
841 "x-xss-protection": [
842 "1; mode=block"
843 ],
844 "expires": [
845 "Sat, 01 Jan 2000 00:00:00 GMT"
846 ],
847 "x-content-type-options": [
848 "nosniff"
849 ],
850 "referrer-policy": [
851 "no-referrer"
852 ],
853 "server": [
854 "Apache/2.4.10 (Debian)"
855 ],
856 "cache-control": [
857 "no-store"
858 ]
859 },
860 "status": {
861 "code": 200,
862 "message": "OK"
863 },
864 "body": {
865 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"moved binary\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"moved binary\"}]},\"error_code\":null,\"error_info\":null}"
866 }
867 },
868 "request": {
869 "headers": {
870 "content-length": [
871 "157"
872 ],
873 "host": [
874 "phab.mercurial-scm.org"
875 ],
876 "content-type": [
877 "application/x-www-form-urlencoded"
878 ],
879 "user-agent": [
880 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
881 ],
882 "accept": [
883 "application/mercurial-0.1"
884 ]
885 },
886 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
887 "method": "POST",
888 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22moved+binary%22%7D"
889 }
890 },
891 {
892 "response": {
893 "headers": {
894 "date": [
895 "Tue, 18 Feb 2020 18:43:17 GMT"
896 ],
897 "strict-transport-security": [
898 "max-age=0; includeSubdomains; preload"
899 ],
900 "x-frame-options": [
901 "Deny"
902 ],
903 "content-type": [
904 "application/json"
905 ],
906 "transfer-encoding": [
907 "chunked"
908 ],
909 "x-xss-protection": [
910 "1; mode=block"
911 ],
912 "expires": [
913 "Sat, 01 Jan 2000 00:00:00 GMT"
914 ],
915 "x-content-type-options": [
916 "nosniff"
917 ],
918 "referrer-policy": [
919 "no-referrer"
920 ],
921 "server": [
922 "Apache/2.4.10 (Debian)"
923 ],
924 "cache-control": [
925 "no-store"
926 ]
927 },
928 "status": {
929 "code": 200,
930 "message": "OK"
931 },
932 "body": {
933 "string": "{\"result\":{\"object\":{\"id\":8129,\"phid\":\"PHID-DREV-s74qwcmdszxugly5fjru\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-a36lldjdtmtyks2\"},{\"phid\":\"PHID-XACT-DREV-xq32hvqubr4vi6u\"},{\"phid\":\"PHID-XACT-DREV-t7xt2kxu2u3ld2s\"},{\"phid\":\"PHID-XACT-DREV-kzygpxqiccpph4u\"},{\"phid\":\"PHID-XACT-DREV-vhr6fdqah3gnmnn\"},{\"phid\":\"PHID-XACT-DREV-ghlyvkgloeskuqg\"}]},\"error_code\":null,\"error_info\":null}"
934 }
935 },
936 "request": {
937 "headers": {
938 "content-length": [
939 "412"
940 ],
941 "host": [
942 "phab.mercurial-scm.org"
943 ],
944 "content-type": [
945 "application/x-www-form-urlencoded"
946 ],
947 "user-agent": [
948 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
949 ],
950 "accept": [
951 "application/mercurial-0.1"
952 ]
953 },
954 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
955 "method": "POST",
956 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-svria2kxhgr63qwpdzzb%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-ebpbxa27h5ibeudclsse%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22moved+binary%22%7D%5D%7D"
957 }
958 },
959 {
960 "response": {
961 "headers": {
962 "date": [
963 "Tue, 18 Feb 2020 18:43:18 GMT"
964 ],
965 "strict-transport-security": [
966 "max-age=0; includeSubdomains; preload"
967 ],
968 "x-frame-options": [
969 "Deny"
970 ],
971 "content-type": [
972 "application/json"
973 ],
974 "transfer-encoding": [
975 "chunked"
976 ],
977 "x-xss-protection": [
978 "1; mode=block"
979 ],
980 "expires": [
981 "Sat, 01 Jan 2000 00:00:00 GMT"
982 ],
983 "x-content-type-options": [
984 "nosniff"
985 ],
986 "referrer-policy": [
987 "no-referrer"
988 ],
989 "server": [
990 "Apache/2.4.10 (Debian)"
991 ],
992 "cache-control": [
993 "no-store"
994 ]
995 },
996 "status": {
997 "code": 200,
998 "message": "OK"
999 },
1000 "body": {
1001 "string": "{\"result\":{\"upload\":false,\"filePHID\":\"PHID-FILE-wodobo7epta7trv3vdz2\"},\"error_code\":null,\"error_info\":null}"
1002 }
1003 },
1004 "request": {
1005 "headers": {
1006 "content-length": [
1007 "277"
1008 ],
1009 "host": [
1010 "phab.mercurial-scm.org"
1011 ],
1012 "content-type": [
1013 "application/x-www-form-urlencoded"
1014 ],
1015 "user-agent": [
1016 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1017 ],
1018 "accept": [
1019 "application/mercurial-0.1"
1020 ]
1021 },
1022 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
1023 "method": "POST",
1024 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2_copied%22%7D"
1025 }
1026 },
1027 {
1028 "response": {
1029 "headers": {
1030 "date": [
1031 "Tue, 18 Feb 2020 18:43:18 GMT"
1032 ],
1033 "strict-transport-security": [
1034 "max-age=0; includeSubdomains; preload"
1035 ],
1036 "x-frame-options": [
1037 "Deny"
1038 ],
1039 "content-type": [
1040 "application/json"
1041 ],
1042 "transfer-encoding": [
1043 "chunked"
1044 ],
1045 "x-xss-protection": [
1046 "1; mode=block"
1047 ],
1048 "expires": [
1049 "Sat, 01 Jan 2000 00:00:00 GMT"
1050 ],
1051 "x-content-type-options": [
1052 "nosniff"
1053 ],
1054 "referrer-policy": [
1055 "no-referrer"
1056 ],
1057 "server": [
1058 "Apache/2.4.10 (Debian)"
1059 ],
1060 "cache-control": [
1061 "no-store"
1062 ]
1063 },
1064 "status": {
1065 "code": 200,
1066 "message": "OK"
1067 },
1068 "body": {
1069 "string": "{\"result\":{\"diffid\":20254,\"phid\":\"PHID-DIFF-g7zeghdg2dzimfldfk5b\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/20254\\/\"},\"error_code\":null,\"error_info\":null}"
1070 }
1071 },
1072 "request": {
1073 "headers": {
1074 "content-length": [
1075 "1544"
1076 ],
1077 "host": [
1078 "phab.mercurial-scm.org"
1079 ],
1080 "content-type": [
1081 "application/x-www-form-urlencoded"
1082 ],
1083 "user-agent": [
1084 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1085 ],
1086 "accept": [
1087 "application/mercurial-0.1"
1088 ]
1089 },
1090 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
1091 "method": "POST",
1092 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22bin2_copied%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_copied%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-wodobo7epta7trv3vdz2%22%2C+%22new%3Afile%3Asize%22%3A+2%2C+%22old%3Abinary-phid%22%3A+%22PHID-FILE-wodobo7epta7trv3vdz2%22%2C+%22old%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2_moved%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+7%7D%2C+%22bin2_moved%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%22bin2_copied%22%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_moved%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+null%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+5%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22834ab31d80aede3f92435c26366095c3518dc5af%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D"
1093 }
1094 },
1095 {
1096 "response": {
1097 "headers": {
1098 "date": [
1099 "Tue, 18 Feb 2020 18:43:19 GMT"
1100 ],
1101 "strict-transport-security": [
1102 "max-age=0; includeSubdomains; preload"
1103 ],
1104 "x-frame-options": [
1105 "Deny"
1106 ],
1107 "content-type": [
1108 "application/json"
1109 ],
1110 "transfer-encoding": [
1111 "chunked"
1112 ],
1113 "x-xss-protection": [
1114 "1; mode=block"
1115 ],
1116 "expires": [
1117 "Sat, 01 Jan 2000 00:00:00 GMT"
1118 ],
1119 "x-content-type-options": [
1120 "nosniff"
1121 ],
1122 "referrer-policy": [
1123 "no-referrer"
1124 ],
1125 "server": [
1126 "Apache/2.4.10 (Debian)"
1127 ],
1128 "cache-control": [
1129 "no-store"
1130 ]
1131 },
1132 "status": {
1133 "code": 200,
1134 "message": "OK"
1135 },
1136 "body": {
1137 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1138 }
1139 },
1140 "request": {
1141 "headers": {
1142 "content-length": [
1143 "482"
1144 ],
1145 "host": [
1146 "phab.mercurial-scm.org"
1147 ],
1148 "content-type": [
1149 "application/x-www-form-urlencoded"
1150 ],
1151 "user-agent": [
1152 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1153 ],
1154 "accept": [
1155 "application/mercurial-0.1"
1156 ]
1157 },
1158 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1159 "method": "POST",
1160 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22494b750e5194bb08c1de04d6b292e28273afa18c%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22834ab31d80aede3f92435c26366095c3518dc5af%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20254%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
1161 }
1162 },
1163 {
1164 "response": {
1165 "headers": {
1166 "date": [
1167 "Tue, 18 Feb 2020 18:43:19 GMT"
1168 ],
1169 "strict-transport-security": [
1170 "max-age=0; includeSubdomains; preload"
1171 ],
1172 "x-frame-options": [
1173 "Deny"
1174 ],
1175 "content-type": [
1176 "application/json"
1177 ],
1178 "transfer-encoding": [
1179 "chunked"
1180 ],
1181 "x-xss-protection": [
1182 "1; mode=block"
1183 ],
1184 "expires": [
1185 "Sat, 01 Jan 2000 00:00:00 GMT"
1186 ],
1187 "x-content-type-options": [
1188 "nosniff"
1189 ],
1190 "referrer-policy": [
1191 "no-referrer"
1192 ],
1193 "server": [
1194 "Apache/2.4.10 (Debian)"
1195 ],
1196 "cache-control": [
1197 "no-store"
1198 ]
1199 },
1200 "status": {
1201 "code": 200,
1202 "message": "OK"
1203 },
1204 "body": {
1205 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1206 }
1207 },
1208 "request": {
1209 "headers": {
1210 "content-length": [
1211 "594"
1212 ],
1213 "host": [
1214 "phab.mercurial-scm.org"
1215 ],
1216 "content-type": [
1217 "application/x-www-form-urlencoded"
1218 ],
1219 "user-agent": [
1220 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1221 ],
1222 "accept": [
1223 "application/mercurial-0.1"
1224 ]
1225 },
1226 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1227 "method": "POST",
1228 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22494b750e5194bb08c1de04d6b292e28273afa18c%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22494b750e5194bb08c1de04d6b292e28273afa18c%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22834ab31d80aede3f92435c26366095c3518dc5af%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20254%2C+%22name%22%3A+%22local%3Acommits%22%7D"
1229 }
1230 },
1231 {
1232 "response": {
1233 "headers": {
1234 "date": [
1235 "Tue, 18 Feb 2020 18:43:19 GMT"
1236 ],
1237 "strict-transport-security": [
1238 "max-age=0; includeSubdomains; preload"
1239 ],
1240 "x-frame-options": [
1241 "Deny"
1242 ],
1243 "content-type": [
1244 "application/json"
1245 ],
1246 "transfer-encoding": [
1247 "chunked"
1248 ],
1249 "x-xss-protection": [
1250 "1; mode=block"
1251 ],
1252 "expires": [
1253 "Sat, 01 Jan 2000 00:00:00 GMT"
1254 ],
1255 "x-content-type-options": [
1256 "nosniff"
1257 ],
1258 "referrer-policy": [
1259 "no-referrer"
1260 ],
1261 "server": [
1262 "Apache/2.4.10 (Debian)"
1263 ],
1264 "cache-control": [
1265 "no-store"
1266 ]
1267 },
1268 "status": {
1269 "code": 200,
1270 "message": "OK"
1271 },
1272 "body": {
1273 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"copied binary\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"copied binary\"}]},\"error_code\":null,\"error_info\":null}"
1274 }
1275 },
1276 "request": {
1277 "headers": {
1278 "content-length": [
1279 "158"
1280 ],
1281 "host": [
1282 "phab.mercurial-scm.org"
1283 ],
1284 "content-type": [
1285 "application/x-www-form-urlencoded"
1286 ],
1287 "user-agent": [
1288 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1289 ],
1290 "accept": [
1291 "application/mercurial-0.1"
1292 ]
1293 },
1294 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
1295 "method": "POST",
1296 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22copied+binary%22%7D"
1297 }
1298 },
1299 {
1300 "response": {
1301 "headers": {
1302 "date": [
1303 "Tue, 18 Feb 2020 18:43:20 GMT"
1304 ],
1305 "strict-transport-security": [
1306 "max-age=0; includeSubdomains; preload"
1307 ],
1308 "x-frame-options": [
1309 "Deny"
1310 ],
1311 "content-type": [
1312 "application/json"
1313 ],
1314 "transfer-encoding": [
1315 "chunked"
1316 ],
1317 "x-xss-protection": [
1318 "1; mode=block"
1319 ],
1320 "expires": [
1321 "Sat, 01 Jan 2000 00:00:00 GMT"
1322 ],
1323 "x-content-type-options": [
1324 "nosniff"
1325 ],
1326 "referrer-policy": [
1327 "no-referrer"
1328 ],
1329 "server": [
1330 "Apache/2.4.10 (Debian)"
1331 ],
1332 "cache-control": [
1333 "no-store"
1334 ]
1335 },
1336 "status": {
1337 "code": 200,
1338 "message": "OK"
1339 },
1340 "body": {
1341 "string": "{\"result\":{\"object\":{\"id\":8130,\"phid\":\"PHID-DREV-fqgsuuwbzvaodvaeengd\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-5vmkafcunj3k74h\"},{\"phid\":\"PHID-XACT-DREV-fqkifg45n7tfcwm\"},{\"phid\":\"PHID-XACT-DREV-xuxqxmsrs5l4ppy\"},{\"phid\":\"PHID-XACT-DREV-brzpr5bfvs7r6jr\"},{\"phid\":\"PHID-XACT-DREV-3jplzixdyccpkpf\"},{\"phid\":\"PHID-XACT-DREV-sqm2qjussz6bsta\"}]},\"error_code\":null,\"error_info\":null}"
1342 }
1343 },
1344 "request": {
1345 "headers": {
1346 "content-length": [
1347 "413"
1348 ],
1349 "host": [
1350 "phab.mercurial-scm.org"
1351 ],
1352 "content-type": [
1353 "application/x-www-form-urlencoded"
1354 ],
1355 "user-agent": [
1356 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1357 ],
1358 "accept": [
1359 "application/mercurial-0.1"
1360 ]
1361 },
1362 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
1363 "method": "POST",
1364 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-g7zeghdg2dzimfldfk5b%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-s74qwcmdszxugly5fjru%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22copied+binary%22%7D%5D%7D"
1365 }
1366 },
1367 {
1368 "response": {
1369 "headers": {
1370 "date": [
1371 "Tue, 18 Feb 2020 18:43:21 GMT"
1372 ],
1373 "strict-transport-security": [
1374 "max-age=0; includeSubdomains; preload"
1375 ],
1376 "x-frame-options": [
1377 "Deny"
1378 ],
1379 "content-type": [
1380 "application/json"
1381 ],
1382 "transfer-encoding": [
1383 "chunked"
1384 ],
1385 "x-xss-protection": [
1386 "1; mode=block"
1387 ],
1388 "expires": [
1389 "Sat, 01 Jan 2000 00:00:00 GMT"
1390 ],
1391 "x-content-type-options": [
1392 "nosniff"
1393 ],
1394 "referrer-policy": [
1395 "no-referrer"
1396 ],
1397 "server": [
1398 "Apache/2.4.10 (Debian)"
1399 ],
1400 "cache-control": [
1401 "no-store"
1402 ]
1403 },
1404 "status": {
1405 "code": 200,
1406 "message": "OK"
1407 },
1408 "body": {
1409 "string": "{\"result\":{\"upload\":true,\"filePHID\":null},\"error_code\":null,\"error_info\":null}"
1410 }
1411 },
1412 "request": {
1413 "headers": {
1414 "content-length": [
1415 "282"
1416 ],
1417 "host": [
1418 "phab.mercurial-scm.org"
1419 ],
1420 "content-type": [
1421 "application/x-www-form-urlencoded"
1422 ],
1423 "user-agent": [
1424 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1425 ],
1426 "accept": [
1427 "application/mercurial-0.1"
1428 ]
1429 },
1430 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
1431 "method": "POST",
1432 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22c95c6d5f8642ef05301e41e9f07b12b1ba6ab75ec0892e125c9ea87ac380c396%22%2C+%22contentLength%22%3A+9%2C+%22name%22%3A+%22bin2_moved_again%22%7D"
1433 }
1434 },
1435 {
1436 "response": {
1437 "headers": {
1438 "date": [
1439 "Tue, 18 Feb 2020 18:43:21 GMT"
1440 ],
1441 "strict-transport-security": [
1442 "max-age=0; includeSubdomains; preload"
1443 ],
1444 "x-frame-options": [
1445 "Deny"
1446 ],
1447 "content-type": [
1448 "application/json"
1449 ],
1450 "transfer-encoding": [
1451 "chunked"
1452 ],
1453 "x-xss-protection": [
1454 "1; mode=block"
1455 ],
1456 "expires": [
1457 "Sat, 01 Jan 2000 00:00:00 GMT"
1458 ],
1459 "x-content-type-options": [
1460 "nosniff"
1461 ],
1462 "referrer-policy": [
1463 "no-referrer"
1464 ],
1465 "server": [
1466 "Apache/2.4.10 (Debian)"
1467 ],
1468 "cache-control": [
1469 "no-store"
1470 ]
1471 },
1472 "status": {
1473 "code": 200,
1474 "message": "OK"
1475 },
1476 "body": {
1477 "string": "{\"result\":\"PHID-FILE-2jcxh7p4n5p7gqhpyvv6\",\"error_code\":null,\"error_info\":null}"
1478 }
1479 },
1480 "request": {
1481 "headers": {
1482 "content-length": [
1483 "202"
1484 ],
1485 "host": [
1486 "phab.mercurial-scm.org"
1487 ],
1488 "content-type": [
1489 "application/x-www-form-urlencoded"
1490 ],
1491 "user-agent": [
1492 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1493 ],
1494 "accept": [
1495 "application/mercurial-0.1"
1496 ]
1497 },
1498 "uri": "https://phab.mercurial-scm.org//api/file.upload",
1499 "method": "POST",
1500 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data_base64%22%3A+%22AG1vdmUrbW9k%22%2C+%22name%22%3A+%22bin2_moved_again%22%7D"
1501 }
1502 },
1503 {
1504 "response": {
1505 "headers": {
1506 "date": [
1507 "Tue, 18 Feb 2020 18:43:22 GMT"
1508 ],
1509 "strict-transport-security": [
1510 "max-age=0; includeSubdomains; preload"
1511 ],
1512 "x-frame-options": [
1513 "Deny"
1514 ],
1515 "content-type": [
1516 "application/json"
1517 ],
1518 "transfer-encoding": [
1519 "chunked"
1520 ],
1521 "x-xss-protection": [
1522 "1; mode=block"
1523 ],
1524 "expires": [
1525 "Sat, 01 Jan 2000 00:00:00 GMT"
1526 ],
1527 "x-content-type-options": [
1528 "nosniff"
1529 ],
1530 "referrer-policy": [
1531 "no-referrer"
1532 ],
1533 "server": [
1534 "Apache/2.4.10 (Debian)"
1535 ],
1536 "cache-control": [
1537 "no-store"
1538 ]
1539 },
1540 "status": {
1541 "code": 200,
1542 "message": "OK"
1543 },
1544 "body": {
1545 "string": "{\"result\":{\"upload\":false,\"filePHID\":\"PHID-FILE-kjwtw4t6foc3cxtpcot2\"},\"error_code\":null,\"error_info\":null}"
1546 }
1547 },
1548 "request": {
1549 "headers": {
1550 "content-length": [
1551 "277"
1552 ],
1553 "host": [
1554 "phab.mercurial-scm.org"
1555 ],
1556 "content-type": [
1557 "application/x-www-form-urlencoded"
1558 ],
1559 "user-agent": [
1560 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1561 ],
1562 "accept": [
1563 "application/mercurial-0.1"
1564 ]
1565 },
1566 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
1567 "method": "POST",
1568 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2_copied%22%7D"
1569 }
1570 },
1571 {
1572 "response": {
1573 "headers": {
1574 "date": [
1575 "Tue, 18 Feb 2020 18:43:22 GMT"
1576 ],
1577 "strict-transport-security": [
1578 "max-age=0; includeSubdomains; preload"
1579 ],
1580 "x-frame-options": [
1581 "Deny"
1582 ],
1583 "content-type": [
1584 "application/json"
1585 ],
1586 "transfer-encoding": [
1587 "chunked"
1588 ],
1589 "x-xss-protection": [
1590 "1; mode=block"
1591 ],
1592 "expires": [
1593 "Sat, 01 Jan 2000 00:00:00 GMT"
1594 ],
1595 "x-content-type-options": [
1596 "nosniff"
1597 ],
1598 "referrer-policy": [
1599 "no-referrer"
1600 ],
1601 "server": [
1602 "Apache/2.4.10 (Debian)"
1603 ],
1604 "cache-control": [
1605 "no-store"
1606 ]
1607 },
1608 "status": {
1609 "code": 200,
1610 "message": "OK"
1611 },
1612 "body": {
1613 "string": "{\"result\":{\"diffid\":20255,\"phid\":\"PHID-DIFF-euvajw4uojheyjmhqmav\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/20255\\/\"},\"error_code\":null,\"error_info\":null}"
1614 }
1615 },
1616 "request": {
1617 "headers": {
1618 "content-length": [
1619 "1575"
1620 ],
1621 "host": [
1622 "phab.mercurial-scm.org"
1623 ],
1624 "content-type": [
1625 "application/x-www-form-urlencoded"
1626 ],
1627 "user-agent": [
1628 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1629 ],
1630 "accept": [
1631 "application/mercurial-0.1"
1632 ]
1633 },
1634 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
1635 "method": "POST",
1636 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22bin2_copied%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%22bin2_moved_again%22%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_copied%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2_copied%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+4%7D%2C+%22bin2_moved_again%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_moved_again%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+3%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-2jcxh7p4n5p7gqhpyvv6%22%2C+%22new%3Afile%3Asize%22%3A+9%2C+%22old%3Abinary-phid%22%3A+%22PHID-FILE-kjwtw4t6foc3cxtpcot2%22%2C+%22old%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2_copied%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+6%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22494b750e5194bb08c1de04d6b292e28273afa18c%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D"
1637 }
1638 },
1639 {
1640 "response": {
1641 "headers": {
1642 "date": [
1643 "Tue, 18 Feb 2020 18:43:22 GMT"
1644 ],
1645 "strict-transport-security": [
1646 "max-age=0; includeSubdomains; preload"
1647 ],
1648 "x-frame-options": [
1649 "Deny"
1650 ],
1651 "content-type": [
1652 "application/json"
1653 ],
1654 "transfer-encoding": [
1655 "chunked"
1656 ],
1657 "x-xss-protection": [
1658 "1; mode=block"
1659 ],
1660 "expires": [
1661 "Sat, 01 Jan 2000 00:00:00 GMT"
1662 ],
1663 "x-content-type-options": [
1664 "nosniff"
1665 ],
1666 "referrer-policy": [
1667 "no-referrer"
1668 ],
1669 "server": [
1670 "Apache/2.4.10 (Debian)"
1671 ],
1672 "cache-control": [
1673 "no-store"
1674 ]
1675 },
1676 "status": {
1677 "code": 200,
1678 "message": "OK"
1679 },
1680 "body": {
1681 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1682 }
1683 },
1684 "request": {
1685 "headers": {
1686 "content-length": [
1687 "482"
1688 ],
1689 "host": [
1690 "phab.mercurial-scm.org"
1691 ],
1692 "content-type": [
1693 "application/x-www-form-urlencoded"
1694 ],
1695 "user-agent": [
1696 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1697 ],
1698 "accept": [
1699 "application/mercurial-0.1"
1700 ]
1701 },
1702 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1703 "method": "POST",
1704 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22494b750e5194bb08c1de04d6b292e28273afa18c%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20255%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
1705 }
1706 },
1707 {
1708 "response": {
1709 "headers": {
1710 "date": [
1711 "Tue, 18 Feb 2020 18:43:23 GMT"
1712 ],
1713 "strict-transport-security": [
1714 "max-age=0; includeSubdomains; preload"
1715 ],
1716 "x-frame-options": [
1717 "Deny"
1718 ],
1719 "content-type": [
1720 "application/json"
1721 ],
1722 "transfer-encoding": [
1723 "chunked"
1724 ],
1725 "x-xss-protection": [
1726 "1; mode=block"
1727 ],
1728 "expires": [
1729 "Sat, 01 Jan 2000 00:00:00 GMT"
1730 ],
1731 "x-content-type-options": [
1732 "nosniff"
1733 ],
1734 "referrer-policy": [
1735 "no-referrer"
1736 ],
1737 "server": [
1738 "Apache/2.4.10 (Debian)"
1739 ],
1740 "cache-control": [
1741 "no-store"
1742 ]
1743 },
1744 "status": {
1745 "code": 200,
1746 "message": "OK"
1747 },
1748 "body": {
1749 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1750 }
1751 },
1752 "request": {
1753 "headers": {
1754 "content-length": [
1755 "594"
1756 ],
1757 "host": [
1758 "phab.mercurial-scm.org"
1759 ],
1760 "content-type": [
1761 "application/x-www-form-urlencoded"
1762 ],
1763 "user-agent": [
1764 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1765 ],
1766 "accept": [
1767 "application/mercurial-0.1"
1768 ]
1769 },
1770 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1771 "method": "POST",
1772 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22494b750e5194bb08c1de04d6b292e28273afa18c%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20255%2C+%22name%22%3A+%22local%3Acommits%22%7D"
1773 }
1774 },
1775 {
1776 "response": {
1777 "headers": {
1778 "date": [
1779 "Tue, 18 Feb 2020 18:43:23 GMT"
1780 ],
1781 "strict-transport-security": [
1782 "max-age=0; includeSubdomains; preload"
1783 ],
1784 "x-frame-options": [
1785 "Deny"
1786 ],
1787 "content-type": [
1788 "application/json"
1789 ],
1790 "transfer-encoding": [
1791 "chunked"
1792 ],
1793 "x-xss-protection": [
1794 "1; mode=block"
1795 ],
1796 "expires": [
1797 "Sat, 01 Jan 2000 00:00:00 GMT"
1798 ],
1799 "x-content-type-options": [
1800 "nosniff"
1801 ],
1802 "referrer-policy": [
1803 "no-referrer"
1804 ],
1805 "server": [
1806 "Apache/2.4.10 (Debian)"
1807 ],
1808 "cache-control": [
1809 "no-store"
1810 ]
1811 },
1812 "status": {
1813 "code": 200,
1814 "message": "OK"
1815 },
1816 "body": {
1817 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"move+mod copied binary\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"move+mod copied binary\"}]},\"error_code\":null,\"error_info\":null}"
1818 }
1819 },
1820 "request": {
1821 "headers": {
1822 "content-length": [
1823 "169"
1824 ],
1825 "host": [
1826 "phab.mercurial-scm.org"
1827 ],
1828 "content-type": [
1829 "application/x-www-form-urlencoded"
1830 ],
1831 "user-agent": [
1832 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1833 ],
1834 "accept": [
1835 "application/mercurial-0.1"
1836 ]
1837 },
1838 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
1839 "method": "POST",
1840 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22move%2Bmod+copied+binary%22%7D"
1841 }
1842 },
1843 {
1844 "response": {
1845 "headers": {
1846 "date": [
1847 "Tue, 18 Feb 2020 18:43:24 GMT"
1848 ],
1849 "strict-transport-security": [
1850 "max-age=0; includeSubdomains; preload"
1851 ],
1852 "x-frame-options": [
1853 "Deny"
1854 ],
1855 "content-type": [
1856 "application/json"
1857 ],
1858 "transfer-encoding": [
1859 "chunked"
1860 ],
1861 "x-xss-protection": [
1862 "1; mode=block"
1863 ],
1864 "expires": [
1865 "Sat, 01 Jan 2000 00:00:00 GMT"
1866 ],
1867 "x-content-type-options": [
1868 "nosniff"
1869 ],
1870 "referrer-policy": [
1871 "no-referrer"
1872 ],
1873 "server": [
1874 "Apache/2.4.10 (Debian)"
1875 ],
1876 "cache-control": [
1877 "no-store"
1878 ]
1879 },
1880 "status": {
1881 "code": 200,
1882 "message": "OK"
1883 },
1884 "body": {
1885 "string": "{\"result\":{\"object\":{\"id\":8131,\"phid\":\"PHID-DREV-hcbzjqg4xhb6thfuhkgp\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-ui3eapxx7g4iur2\"},{\"phid\":\"PHID-XACT-DREV-wgr7u5ka7qru33i\"},{\"phid\":\"PHID-XACT-DREV-2yozrzratdvxnl7\"},{\"phid\":\"PHID-XACT-DREV-ikamh5ifb64cnr5\"},{\"phid\":\"PHID-XACT-DREV-cu2qhhytni4ruip\"},{\"phid\":\"PHID-XACT-DREV-n3zs6drpigwdrtw\"}]},\"error_code\":null,\"error_info\":null}"
1886 }
1887 },
1888 "request": {
1889 "headers": {
1890 "content-length": [
1891 "424"
1892 ],
1893 "host": [
1894 "phab.mercurial-scm.org"
1895 ],
1896 "content-type": [
1897 "application/x-www-form-urlencoded"
1898 ],
1899 "user-agent": [
1900 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1901 ],
1902 "accept": [
1903 "application/mercurial-0.1"
1904 ]
1905 },
1906 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
1907 "method": "POST",
1908 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-euvajw4uojheyjmhqmav%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-fqgsuuwbzvaodvaeengd%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22move%2Bmod+copied+binary%22%7D%5D%7D"
1909 }
1910 },
1911 {
1912 "response": {
1913 "headers": {
1914 "date": [
1915 "Tue, 18 Feb 2020 18:43:24 GMT"
1916 ],
1917 "strict-transport-security": [
1918 "max-age=0; includeSubdomains; preload"
1919 ],
1920 "x-frame-options": [
1921 "Deny"
1922 ],
1923 "content-type": [
1924 "application/json"
1925 ],
1926 "transfer-encoding": [
1927 "chunked"
1928 ],
1929 "x-xss-protection": [
1930 "1; mode=block"
1931 ],
1932 "expires": [
1933 "Sat, 01 Jan 2000 00:00:00 GMT"
1934 ],
1935 "x-content-type-options": [
1936 "nosniff"
1937 ],
1938 "referrer-policy": [
1939 "no-referrer"
1940 ],
1941 "server": [
1942 "Apache/2.4.10 (Debian)"
1943 ],
1944 "cache-control": [
1945 "no-store"
1946 ]
1947 },
1948 "status": {
1949 "code": 200,
1950 "message": "OK"
1951 },
1952 "body": {
1953 "string": "{\"result\":{\"upload\":true,\"filePHID\":null},\"error_code\":null,\"error_info\":null}"
1954 }
1955 },
1956 "request": {
1957 "headers": {
1958 "content-length": [
1959 "284"
1960 ],
1961 "host": [
1962 "phab.mercurial-scm.org"
1963 ],
1964 "content-type": [
1965 "application/x-www-form-urlencoded"
1966 ],
1967 "user-agent": [
1968 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
1969 ],
1970 "accept": [
1971 "application/mercurial-0.1"
1972 ]
1973 },
1974 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
1975 "method": "POST",
1976 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%226aa3b10b091f1381d0720c4e96baee2ac03f355a10b0bc1afebf082cab1a9589%22%2C+%22contentLength%22%3A+12%2C+%22name%22%3A+%22bin2_moved_copied%22%7D"
1977 }
1978 },
1979 {
1980 "response": {
1981 "headers": {
1982 "date": [
1983 "Tue, 18 Feb 2020 18:43:25 GMT"
1984 ],
1985 "strict-transport-security": [
1986 "max-age=0; includeSubdomains; preload"
1987 ],
1988 "x-frame-options": [
1989 "Deny"
1990 ],
1991 "content-type": [
1992 "application/json"
1993 ],
1994 "transfer-encoding": [
1995 "chunked"
1996 ],
1997 "x-xss-protection": [
1998 "1; mode=block"
1999 ],
2000 "expires": [
2001 "Sat, 01 Jan 2000 00:00:00 GMT"
2002 ],
2003 "x-content-type-options": [
2004 "nosniff"
2005 ],
2006 "referrer-policy": [
2007 "no-referrer"
2008 ],
2009 "server": [
2010 "Apache/2.4.10 (Debian)"
2011 ],
2012 "cache-control": [
2013 "no-store"
2014 ]
2015 },
2016 "status": {
2017 "code": 200,
2018 "message": "OK"
2019 },
2020 "body": {
2021 "string": "{\"result\":\"PHID-FILE-2kkudzwg35maqlp4mkdz\",\"error_code\":null,\"error_info\":null}"
2022 }
2023 },
2024 "request": {
2025 "headers": {
2026 "content-length": [
2027 "207"
2028 ],
2029 "host": [
2030 "phab.mercurial-scm.org"
2031 ],
2032 "content-type": [
2033 "application/x-www-form-urlencoded"
2034 ],
2035 "user-agent": [
2036 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2037 ],
2038 "accept": [
2039 "application/mercurial-0.1"
2040 ]
2041 },
2042 "uri": "https://phab.mercurial-scm.org//api/file.upload",
2043 "method": "POST",
2044 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data_base64%22%3A+%22AHByZWNvcHkgbW9k%22%2C+%22name%22%3A+%22bin2_moved_copied%22%7D"
2045 }
2046 },
2047 {
2048 "response": {
2049 "headers": {
2050 "date": [
2051 "Tue, 18 Feb 2020 18:43:25 GMT"
2052 ],
2053 "strict-transport-security": [
2054 "max-age=0; includeSubdomains; preload"
2055 ],
2056 "x-frame-options": [
2057 "Deny"
2058 ],
2059 "content-type": [
2060 "application/json"
2061 ],
2062 "transfer-encoding": [
2063 "chunked"
2064 ],
2065 "x-xss-protection": [
2066 "1; mode=block"
2067 ],
2068 "expires": [
2069 "Sat, 01 Jan 2000 00:00:00 GMT"
2070 ],
2071 "x-content-type-options": [
2072 "nosniff"
2073 ],
2074 "referrer-policy": [
2075 "no-referrer"
2076 ],
2077 "server": [
2078 "Apache/2.4.10 (Debian)"
2079 ],
2080 "cache-control": [
2081 "no-store"
2082 ]
2083 },
2084 "status": {
2085 "code": 200,
2086 "message": "OK"
2087 },
2088 "body": {
2089 "string": "{\"result\":{\"upload\":false,\"filePHID\":\"PHID-FILE-cj76tzcs23ob2jmkoscw\"},\"error_code\":null,\"error_info\":null}"
2090 }
2091 },
2092 "request": {
2093 "headers": {
2094 "content-length": [
2095 "276"
2096 ],
2097 "host": [
2098 "phab.mercurial-scm.org"
2099 ],
2100 "content-type": [
2101 "application/x-www-form-urlencoded"
2102 ],
2103 "user-agent": [
2104 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2105 ],
2106 "accept": [
2107 "application/mercurial-0.1"
2108 ]
2109 },
2110 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
2111 "method": "POST",
2112 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2_moved%22%7D"
2113 }
2114 },
2115 {
2116 "response": {
2117 "headers": {
2118 "date": [
2119 "Tue, 18 Feb 2020 18:43:26 GMT"
2120 ],
2121 "strict-transport-security": [
2122 "max-age=0; includeSubdomains; preload"
2123 ],
2124 "x-frame-options": [
2125 "Deny"
2126 ],
2127 "content-type": [
2128 "application/json"
2129 ],
2130 "transfer-encoding": [
2131 "chunked"
2132 ],
2133 "x-xss-protection": [
2134 "1; mode=block"
2135 ],
2136 "expires": [
2137 "Sat, 01 Jan 2000 00:00:00 GMT"
2138 ],
2139 "x-content-type-options": [
2140 "nosniff"
2141 ],
2142 "referrer-policy": [
2143 "no-referrer"
2144 ],
2145 "server": [
2146 "Apache/2.4.10 (Debian)"
2147 ],
2148 "cache-control": [
2149 "no-store"
2150 ]
2151 },
2152 "status": {
2153 "code": 200,
2154 "message": "OK"
2155 },
2156 "body": {
2157 "string": "{\"result\":{\"upload\":true,\"filePHID\":null},\"error_code\":null,\"error_info\":null}"
2158 }
2159 },
2160 "request": {
2161 "headers": {
2162 "content-length": [
2163 "277"
2164 ],
2165 "host": [
2166 "phab.mercurial-scm.org"
2167 ],
2168 "content-type": [
2169 "application/x-www-form-urlencoded"
2170 ],
2171 "user-agent": [
2172 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2173 ],
2174 "accept": [
2175 "application/mercurial-0.1"
2176 ]
2177 },
2178 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
2179 "method": "POST",
2180 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22abade6cd35ec23966dc37ae41b0ef87cbe6e7f735965176eb5f04d90c1034492%22%2C+%22contentLength%22%3A+13%2C+%22name%22%3A+%22bin2_moved%22%7D"
2181 }
2182 },
2183 {
2184 "response": {
2185 "headers": {
2186 "date": [
2187 "Tue, 18 Feb 2020 18:43:26 GMT"
2188 ],
2189 "strict-transport-security": [
2190 "max-age=0; includeSubdomains; preload"
2191 ],
2192 "x-frame-options": [
2193 "Deny"
2194 ],
2195 "content-type": [
2196 "application/json"
2197 ],
2198 "transfer-encoding": [
2199 "chunked"
2200 ],
2201 "x-xss-protection": [
2202 "1; mode=block"
2203 ],
2204 "expires": [
2205 "Sat, 01 Jan 2000 00:00:00 GMT"
2206 ],
2207 "x-content-type-options": [
2208 "nosniff"
2209 ],
2210 "referrer-policy": [
2211 "no-referrer"
2212 ],
2213 "server": [
2214 "Apache/2.4.10 (Debian)"
2215 ],
2216 "cache-control": [
2217 "no-store"
2218 ]
2219 },
2220 "status": {
2221 "code": 200,
2222 "message": "OK"
2223 },
2224 "body": {
2225 "string": "{\"result\":\"PHID-FILE-p7kxwaunxwqcsfp2mcai\",\"error_code\":null,\"error_info\":null}"
2226 }
2227 },
2228 "request": {
2229 "headers": {
2230 "content-length": [
2231 "208"
2232 ],
2233 "host": [
2234 "phab.mercurial-scm.org"
2235 ],
2236 "content-type": [
2237 "application/x-www-form-urlencoded"
2238 ],
2239 "user-agent": [
2240 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2241 ],
2242 "accept": [
2243 "application/mercurial-0.1"
2244 ]
2245 },
2246 "uri": "https://phab.mercurial-scm.org//api/file.upload",
2247 "method": "POST",
2248 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data_base64%22%3A+%22AGNvcHkgc3JjK21vZA%3D%3D%22%2C+%22name%22%3A+%22bin2_moved%22%7D"
2249 }
2250 },
2251 {
2252 "response": {
2253 "headers": {
2254 "date": [
2255 "Tue, 18 Feb 2020 18:43:27 GMT"
2256 ],
2257 "strict-transport-security": [
2258 "max-age=0; includeSubdomains; preload"
2259 ],
2260 "x-frame-options": [
2261 "Deny"
2262 ],
2263 "content-type": [
2264 "application/json"
2265 ],
2266 "transfer-encoding": [
2267 "chunked"
2268 ],
2269 "x-xss-protection": [
2270 "1; mode=block"
2271 ],
2272 "expires": [
2273 "Sat, 01 Jan 2000 00:00:00 GMT"
2274 ],
2275 "x-content-type-options": [
2276 "nosniff"
2277 ],
2278 "referrer-policy": [
2279 "no-referrer"
2280 ],
2281 "server": [
2282 "Apache/2.4.10 (Debian)"
2283 ],
2284 "cache-control": [
2285 "no-store"
2286 ]
2287 },
2288 "status": {
2289 "code": 200,
2290 "message": "OK"
2291 },
2292 "body": {
2293 "string": "{\"result\":{\"upload\":false,\"filePHID\":\"PHID-FILE-erukut7iaaxa6anzphgx\"},\"error_code\":null,\"error_info\":null}"
2294 }
2295 },
2296 "request": {
2297 "headers": {
2298 "content-length": [
2299 "276"
2300 ],
2301 "host": [
2302 "phab.mercurial-scm.org"
2303 ],
2304 "content-type": [
2305 "application/x-www-form-urlencoded"
2306 ],
2307 "user-agent": [
2308 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2309 ],
2310 "accept": [
2311 "application/mercurial-0.1"
2312 ]
2313 },
2314 "uri": "https://phab.mercurial-scm.org//api/file.allocate",
2315 "method": "POST",
2316 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22contentHash%22%3A+%22597fcb31282d34654c200d3418fca5705c648ebf326ec73d8ddef11841f876d8%22%2C+%22contentLength%22%3A+2%2C+%22name%22%3A+%22bin2_moved%22%7D"
2317 }
2318 },
2319 {
2320 "response": {
2321 "headers": {
2322 "date": [
2323 "Tue, 18 Feb 2020 18:43:27 GMT"
2324 ],
2325 "strict-transport-security": [
2326 "max-age=0; includeSubdomains; preload"
2327 ],
2328 "x-frame-options": [
2329 "Deny"
2330 ],
2331 "content-type": [
2332 "application/json"
2333 ],
2334 "transfer-encoding": [
2335 "chunked"
2336 ],
2337 "x-xss-protection": [
2338 "1; mode=block"
2339 ],
2340 "expires": [
2341 "Sat, 01 Jan 2000 00:00:00 GMT"
2342 ],
2343 "x-content-type-options": [
2344 "nosniff"
2345 ],
2346 "referrer-policy": [
2347 "no-referrer"
2348 ],
2349 "server": [
2350 "Apache/2.4.10 (Debian)"
2351 ],
2352 "cache-control": [
2353 "no-store"
2354 ]
2355 },
2356 "status": {
2357 "code": 200,
2358 "message": "OK"
2359 },
2360 "body": {
2361 "string": "{\"result\":{\"diffid\":20256,\"phid\":\"PHID-DIFF-mmdclcfxghe4byjqhaaa\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/20256\\/\"},\"error_code\":null,\"error_info\":null}"
2362 }
2363 },
2364 "request": {
2365 "headers": {
2366 "content-length": [
2367 "1747"
2368 ],
2369 "host": [
2370 "phab.mercurial-scm.org"
2371 ],
2372 "content-type": [
2373 "application/x-www-form-urlencoded"
2374 ],
2375 "user-agent": [
2376 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2377 ],
2378 "accept": [
2379 "application/mercurial-0.1"
2380 ]
2381 },
2382 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
2383 "method": "POST",
2384 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22bin2_moved%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_moved%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+3%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-p7kxwaunxwqcsfp2mcai%22%2C+%22new%3Afile%3Asize%22%3A+13%2C+%22old%3Abinary-phid%22%3A+%22PHID-FILE-erukut7iaaxa6anzphgx%22%2C+%22old%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2_moved%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%2C+%22bin2_moved_copied%22%3A+%7B%22addLines%22%3A+0%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22bin2_moved_copied%22%2C+%22delLines%22%3A+0%2C+%22fileType%22%3A+3%2C+%22hunks%22%3A+%5B%5D%2C+%22metadata%22%3A+%7B%22new%3Abinary-phid%22%3A+%22PHID-FILE-2kkudzwg35maqlp4mkdz%22%2C+%22new%3Afile%3Asize%22%3A+12%2C+%22old%3Abinary-phid%22%3A+%22PHID-FILE-cj76tzcs23ob2jmkoscw%22%2C+%22old%3Afile%3Asize%22%3A+2%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22bin2_moved%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+7%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D"
2385 }
2386 },
2387 {
2388 "response": {
2389 "headers": {
2390 "date": [
2391 "Tue, 18 Feb 2020 18:43:28 GMT"
2392 ],
2393 "strict-transport-security": [
2394 "max-age=0; includeSubdomains; preload"
2395 ],
2396 "x-frame-options": [
2397 "Deny"
2398 ],
2399 "content-type": [
2400 "application/json"
2401 ],
2402 "transfer-encoding": [
2403 "chunked"
2404 ],
2405 "x-xss-protection": [
2406 "1; mode=block"
2407 ],
2408 "expires": [
2409 "Sat, 01 Jan 2000 00:00:00 GMT"
2410 ],
2411 "x-content-type-options": [
2412 "nosniff"
2413 ],
2414 "referrer-policy": [
2415 "no-referrer"
2416 ],
2417 "server": [
2418 "Apache/2.4.10 (Debian)"
2419 ],
2420 "cache-control": [
2421 "no-store"
2422 ]
2423 },
2424 "status": {
2425 "code": 200,
2426 "message": "OK"
2427 },
2428 "body": {
2429 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2430 }
2431 },
2432 "request": {
2433 "headers": {
2434 "content-length": [
2435 "482"
2436 ],
2437 "host": [
2438 "phab.mercurial-scm.org"
2439 ],
2440 "content-type": [
2441 "application/x-www-form-urlencoded"
2442 ],
2443 "user-agent": [
2444 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2445 ],
2446 "accept": [
2447 "application/mercurial-0.1"
2448 ]
2449 },
2450 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2451 "method": "POST",
2452 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%221b87b363a5e4ce4cd1f05d8518eee1f55cf31919%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20256%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
2453 }
2454 },
2455 {
2456 "response": {
2457 "headers": {
2458 "date": [
2459 "Tue, 18 Feb 2020 18:43:28 GMT"
2460 ],
2461 "strict-transport-security": [
2462 "max-age=0; includeSubdomains; preload"
2463 ],
2464 "x-frame-options": [
2465 "Deny"
2466 ],
2467 "content-type": [
2468 "application/json"
2469 ],
2470 "transfer-encoding": [
2471 "chunked"
2472 ],
2473 "x-xss-protection": [
2474 "1; mode=block"
2475 ],
2476 "expires": [
2477 "Sat, 01 Jan 2000 00:00:00 GMT"
2478 ],
2479 "x-content-type-options": [
2480 "nosniff"
2481 ],
2482 "referrer-policy": [
2483 "no-referrer"
2484 ],
2485 "server": [
2486 "Apache/2.4.10 (Debian)"
2487 ],
2488 "cache-control": [
2489 "no-store"
2490 ]
2491 },
2492 "status": {
2493 "code": 200,
2494 "message": "OK"
2495 },
2496 "body": {
2497 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2498 }
2499 },
2500 "request": {
2501 "headers": {
2502 "content-length": [
2503 "594"
2504 ],
2505 "host": [
2506 "phab.mercurial-scm.org"
2507 ],
2508 "content-type": [
2509 "application/x-www-form-urlencoded"
2510 ],
2511 "user-agent": [
2512 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2513 ],
2514 "accept": [
2515 "application/mercurial-0.1"
2516 ]
2517 },
2518 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2519 "method": "POST",
2520 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%221b87b363a5e4ce4cd1f05d8518eee1f55cf31919%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%221b87b363a5e4ce4cd1f05d8518eee1f55cf31919%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2225f766b50cc23ba2f44ed3bc707b429cdf0186e8%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20256%2C+%22name%22%3A+%22local%3Acommits%22%7D"
2521 }
2522 },
2523 {
2524 "response": {
2525 "headers": {
2526 "date": [
2527 "Tue, 18 Feb 2020 18:43:28 GMT"
2528 ],
2529 "strict-transport-security": [
2530 "max-age=0; includeSubdomains; preload"
2531 ],
2532 "x-frame-options": [
2533 "Deny"
2534 ],
2535 "content-type": [
2536 "application/json"
2537 ],
2538 "transfer-encoding": [
2539 "chunked"
2540 ],
2541 "x-xss-protection": [
2542 "1; mode=block"
2543 ],
2544 "expires": [
2545 "Sat, 01 Jan 2000 00:00:00 GMT"
2546 ],
2547 "x-content-type-options": [
2548 "nosniff"
2549 ],
2550 "referrer-policy": [
2551 "no-referrer"
2552 ],
2553 "server": [
2554 "Apache/2.4.10 (Debian)"
2555 ],
2556 "cache-control": [
2557 "no-store"
2558 ]
2559 },
2560 "status": {
2561 "code": 200,
2562 "message": "OK"
2563 },
2564 "body": {
2565 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"copy+mod moved binary\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"copy+mod moved binary\"}]},\"error_code\":null,\"error_info\":null}"
2566 }
2567 },
2568 "request": {
2569 "headers": {
2570 "content-length": [
2571 "168"
2572 ],
2573 "host": [
2574 "phab.mercurial-scm.org"
2575 ],
2576 "content-type": [
2577 "application/x-www-form-urlencoded"
2578 ],
2579 "user-agent": [
2580 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2581 ],
2582 "accept": [
2583 "application/mercurial-0.1"
2584 ]
2585 },
2586 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
2587 "method": "POST",
2588 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22copy%2Bmod+moved+binary%22%7D"
2589 }
2590 },
2591 {
2592 "response": {
2593 "headers": {
2594 "date": [
2595 "Tue, 18 Feb 2020 18:43:29 GMT"
2596 ],
2597 "strict-transport-security": [
2598 "max-age=0; includeSubdomains; preload"
2599 ],
2600 "x-frame-options": [
2601 "Deny"
2602 ],
2603 "content-type": [
2604 "application/json"
2605 ],
2606 "transfer-encoding": [
2607 "chunked"
2608 ],
2609 "x-xss-protection": [
2610 "1; mode=block"
2611 ],
2612 "expires": [
2613 "Sat, 01 Jan 2000 00:00:00 GMT"
2614 ],
2615 "x-content-type-options": [
2616 "nosniff"
2617 ],
2618 "referrer-policy": [
2619 "no-referrer"
2620 ],
2621 "server": [
2622 "Apache/2.4.10 (Debian)"
2623 ],
2624 "cache-control": [
2625 "no-store"
2626 ]
2627 },
2628 "status": {
2629 "code": 200,
2630 "message": "OK"
2631 },
2632 "body": {
2633 "string": "{\"result\":{\"object\":{\"id\":8132,\"phid\":\"PHID-DREV-mdf64bhsna5jfl6nothr\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-4svgehxywd53uj7\"},{\"phid\":\"PHID-XACT-DREV-px4562fvi7lu6p5\"},{\"phid\":\"PHID-XACT-DREV-jambzs7kliwaca4\"},{\"phid\":\"PHID-XACT-DREV-6znwvwbrhzqepz3\"},{\"phid\":\"PHID-XACT-DREV-v52eykrda6sbtzo\"},{\"phid\":\"PHID-XACT-DREV-bvqyk7zylod5us6\"}]},\"error_code\":null,\"error_info\":null}"
2634 }
2635 },
2636 "request": {
2637 "headers": {
2638 "content-length": [
2639 "423"
2640 ],
2641 "host": [
2642 "phab.mercurial-scm.org"
2643 ],
2644 "content-type": [
2645 "application/x-www-form-urlencoded"
2646 ],
2647 "user-agent": [
2648 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2649 ],
2650 "accept": [
2651 "application/mercurial-0.1"
2652 ]
2653 },
2654 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
2655 "method": "POST",
2656 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-mmdclcfxghe4byjqhaaa%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-hcbzjqg4xhb6thfuhkgp%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22copy%2Bmod+moved+binary%22%7D%5D%7D"
2657 }
2658 },
2659 {
2660 "response": {
2661 "headers": {
2662 "date": [
2663 "Tue, 18 Feb 2020 18:43:30 GMT"
2664 ],
2665 "strict-transport-security": [
2666 "max-age=0; includeSubdomains; preload"
2667 ],
2668 "x-frame-options": [
2669 "Deny"
2670 ],
2671 "content-type": [
2672 "application/json"
2673 ],
2674 "transfer-encoding": [
2675 "chunked"
2676 ],
2677 "x-xss-protection": [
2678 "1; mode=block"
2679 ],
2680 "connection": [
2681 "close"
2682 ],
2683 "expires": [
2684 "Sat, 01 Jan 2000 00:00:00 GMT"
2685 ],
2686 "x-content-type-options": [
2687 "nosniff"
2688 ],
2689 "referrer-policy": [
2690 "no-referrer"
2691 ],
2692 "server": [
2693 "Apache/2.4.10 (Debian)"
2694 ],
2695 "cache-control": [
2696 "no-store"
2697 ]
2698 },
2699 "status": {
2700 "code": 200,
2701 "message": "OK"
2702 },
2703 "body": {
2704 "string": "{\"result\":[{\"id\":\"8132\",\"phid\":\"PHID-DREV-mdf64bhsna5jfl6nothr\",\"title\":\"copy+mod moved binary\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8132\",\"dateCreated\":\"1582051409\",\"dateModified\":\"1582051409\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":0,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"0\",\"activeDiffPHID\":\"PHID-DIFF-mmdclcfxghe4byjqhaaa\",\"diffs\":[\"20256\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-hcbzjqg4xhb6thfuhkgp\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8131\",\"phid\":\"PHID-DREV-hcbzjqg4xhb6thfuhkgp\",\"title\":\"move+mod copied binary\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8131\",\"dateCreated\":\"1582051404\",\"dateModified\":\"1582051409\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":0,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"0\",\"activeDiffPHID\":\"PHID-DIFF-euvajw4uojheyjmhqmav\",\"diffs\":[\"20255\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-fqgsuuwbzvaodvaeengd\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8130\",\"phid\":\"PHID-DREV-fqgsuuwbzvaodvaeengd\",\"title\":\"copied binary\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8130\",\"dateCreated\":\"1582051400\",\"dateModified\":\"1582051404\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":0,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"0\",\"activeDiffPHID\":\"PHID-DIFF-g7zeghdg2dzimfldfk5b\",\"diffs\":[\"20254\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-s74qwcmdszxugly5fjru\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8129\",\"phid\":\"PHID-DREV-s74qwcmdszxugly5fjru\",\"title\":\"moved binary\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8129\",\"dateCreated\":\"1582051397\",\"dateModified\":\"1582051400\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":0,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"0\",\"activeDiffPHID\":\"PHID-DIFF-svria2kxhgr63qwpdzzb\",\"diffs\":[\"20253\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-ebpbxa27h5ibeudclsse\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8128\",\"phid\":\"PHID-DREV-ebpbxa27h5ibeudclsse\",\"title\":\"add another binary\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8128\",\"dateCreated\":\"1582051394\",\"dateModified\":\"1582051397\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":0,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"0\",\"activeDiffPHID\":\"PHID-DIFF-haue3qqytoovnmb7orw6\",\"diffs\":[\"20252\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
2705 }
2706 },
2707 "request": {
2708 "headers": {
2709 "content-length": [
2710 "178"
2711 ],
2712 "host": [
2713 "phab.mercurial-scm.org"
2714 ],
2715 "content-type": [
2716 "application/x-www-form-urlencoded"
2717 ],
2718 "user-agent": [
2719 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2720 ],
2721 "accept": [
2722 "application/mercurial-0.1"
2723 ]
2724 },
2725 "uri": "https://phab.mercurial-scm.org//api/differential.query",
2726 "method": "POST",
2727 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8128%2C+8129%2C+8130%2C+8131%2C+8132%5D%7D"
2728 }
2729 },
2730 {
2731 "response": {
2732 "headers": {
2733 "date": [
2734 "Tue, 18 Feb 2020 18:43:30 GMT"
2735 ],
2736 "strict-transport-security": [
2737 "max-age=0; includeSubdomains; preload"
2738 ],
2739 "x-frame-options": [
2740 "Deny"
2741 ],
2742 "content-type": [
2743 "application/json"
2744 ],
2745 "transfer-encoding": [
2746 "chunked"
2747 ],
2748 "x-xss-protection": [
2749 "1; mode=block"
2750 ],
2751 "expires": [
2752 "Sat, 01 Jan 2000 00:00:00 GMT"
2753 ],
2754 "x-content-type-options": [
2755 "nosniff"
2756 ],
2757 "referrer-policy": [
2758 "no-referrer"
2759 ],
2760 "server": [
2761 "Apache/2.4.10 (Debian)"
2762 ],
2763 "cache-control": [
2764 "no-store"
2765 ]
2766 },
2767 "status": {
2768 "code": 200,
2769 "message": "OK"
2770 },
2771 "body": {
2772 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2773 }
2774 },
2775 "request": {
2776 "headers": {
2777 "content-length": [
2778 "482"
2779 ],
2780 "host": [
2781 "phab.mercurial-scm.org"
2782 ],
2783 "content-type": [
2784 "application/x-www-form-urlencoded"
2785 ],
2786 "user-agent": [
2787 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2788 ],
2789 "accept": [
2790 "application/mercurial-0.1"
2791 ]
2792 },
2793 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2794 "method": "POST",
2795 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2290437c20312a3ff63f2da1c52a0f1b18da2212c1%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2275dbbc901145d7beb190197aa232f74540e5a9f3%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20252%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
2796 }
2797 },
2798 {
2799 "response": {
2800 "headers": {
2801 "date": [
2802 "Tue, 18 Feb 2020 18:43:30 GMT"
2803 ],
2804 "strict-transport-security": [
2805 "max-age=0; includeSubdomains; preload"
2806 ],
2807 "x-frame-options": [
2808 "Deny"
2809 ],
2810 "content-type": [
2811 "application/json"
2812 ],
2813 "transfer-encoding": [
2814 "chunked"
2815 ],
2816 "x-xss-protection": [
2817 "1; mode=block"
2818 ],
2819 "expires": [
2820 "Sat, 01 Jan 2000 00:00:00 GMT"
2821 ],
2822 "x-content-type-options": [
2823 "nosniff"
2824 ],
2825 "referrer-policy": [
2826 "no-referrer"
2827 ],
2828 "server": [
2829 "Apache/2.4.10 (Debian)"
2830 ],
2831 "cache-control": [
2832 "no-store"
2833 ]
2834 },
2835 "status": {
2836 "code": 200,
2837 "message": "OK"
2838 },
2839 "body": {
2840 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2841 }
2842 },
2843 "request": {
2844 "headers": {
2845 "content-length": [
2846 "594"
2847 ],
2848 "host": [
2849 "phab.mercurial-scm.org"
2850 ],
2851 "content-type": [
2852 "application/x-www-form-urlencoded"
2853 ],
2854 "user-agent": [
2855 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2856 ],
2857 "accept": [
2858 "application/mercurial-0.1"
2859 ]
2860 },
2861 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2862 "method": "POST",
2863 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2290437c20312a3ff63f2da1c52a0f1b18da2212c1%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2290437c20312a3ff63f2da1c52a0f1b18da2212c1%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2275dbbc901145d7beb190197aa232f74540e5a9f3%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20252%2C+%22name%22%3A+%22local%3Acommits%22%7D"
2864 }
2865 },
2866 {
2867 "response": {
2868 "headers": {
2869 "date": [
2870 "Tue, 18 Feb 2020 18:43:31 GMT"
2871 ],
2872 "strict-transport-security": [
2873 "max-age=0; includeSubdomains; preload"
2874 ],
2875 "x-frame-options": [
2876 "Deny"
2877 ],
2878 "content-type": [
2879 "application/json"
2880 ],
2881 "transfer-encoding": [
2882 "chunked"
2883 ],
2884 "x-xss-protection": [
2885 "1; mode=block"
2886 ],
2887 "expires": [
2888 "Sat, 01 Jan 2000 00:00:00 GMT"
2889 ],
2890 "x-content-type-options": [
2891 "nosniff"
2892 ],
2893 "referrer-policy": [
2894 "no-referrer"
2895 ],
2896 "server": [
2897 "Apache/2.4.10 (Debian)"
2898 ],
2899 "cache-control": [
2900 "no-store"
2901 ]
2902 },
2903 "status": {
2904 "code": 200,
2905 "message": "OK"
2906 },
2907 "body": {
2908 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2909 }
2910 },
2911 "request": {
2912 "headers": {
2913 "content-length": [
2914 "482"
2915 ],
2916 "host": [
2917 "phab.mercurial-scm.org"
2918 ],
2919 "content-type": [
2920 "application/x-www-form-urlencoded"
2921 ],
2922 "user-agent": [
2923 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2924 ],
2925 "accept": [
2926 "application/mercurial-0.1"
2927 ]
2928 },
2929 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2930 "method": "POST",
2931 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22f391f4da4c61a54562f5e3dce83ea7f7bd38be31%5C%22%2C+%5C%22parent%5C%22%3A+%5C%2290437c20312a3ff63f2da1c52a0f1b18da2212c1%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20253%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
2932 }
2933 },
2934 {
2935 "response": {
2936 "headers": {
2937 "date": [
2938 "Tue, 18 Feb 2020 18:43:31 GMT"
2939 ],
2940 "strict-transport-security": [
2941 "max-age=0; includeSubdomains; preload"
2942 ],
2943 "x-frame-options": [
2944 "Deny"
2945 ],
2946 "content-type": [
2947 "application/json"
2948 ],
2949 "transfer-encoding": [
2950 "chunked"
2951 ],
2952 "x-xss-protection": [
2953 "1; mode=block"
2954 ],
2955 "expires": [
2956 "Sat, 01 Jan 2000 00:00:00 GMT"
2957 ],
2958 "x-content-type-options": [
2959 "nosniff"
2960 ],
2961 "referrer-policy": [
2962 "no-referrer"
2963 ],
2964 "server": [
2965 "Apache/2.4.10 (Debian)"
2966 ],
2967 "cache-control": [
2968 "no-store"
2969 ]
2970 },
2971 "status": {
2972 "code": 200,
2973 "message": "OK"
2974 },
2975 "body": {
2976 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
2977 }
2978 },
2979 "request": {
2980 "headers": {
2981 "content-length": [
2982 "594"
2983 ],
2984 "host": [
2985 "phab.mercurial-scm.org"
2986 ],
2987 "content-type": [
2988 "application/x-www-form-urlencoded"
2989 ],
2990 "user-agent": [
2991 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
2992 ],
2993 "accept": [
2994 "application/mercurial-0.1"
2995 ]
2996 },
2997 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
2998 "method": "POST",
2999 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22f391f4da4c61a54562f5e3dce83ea7f7bd38be31%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22f391f4da4c61a54562f5e3dce83ea7f7bd38be31%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%2290437c20312a3ff63f2da1c52a0f1b18da2212c1%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20253%2C+%22name%22%3A+%22local%3Acommits%22%7D"
3000 }
3001 },
3002 {
3003 "response": {
3004 "headers": {
3005 "date": [
3006 "Tue, 18 Feb 2020 18:43:32 GMT"
3007 ],
3008 "strict-transport-security": [
3009 "max-age=0; includeSubdomains; preload"
3010 ],
3011 "x-frame-options": [
3012 "Deny"
3013 ],
3014 "content-type": [
3015 "application/json"
3016 ],
3017 "transfer-encoding": [
3018 "chunked"
3019 ],
3020 "x-xss-protection": [
3021 "1; mode=block"
3022 ],
3023 "expires": [
3024 "Sat, 01 Jan 2000 00:00:00 GMT"
3025 ],
3026 "x-content-type-options": [
3027 "nosniff"
3028 ],
3029 "referrer-policy": [
3030 "no-referrer"
3031 ],
3032 "server": [
3033 "Apache/2.4.10 (Debian)"
3034 ],
3035 "cache-control": [
3036 "no-store"
3037 ]
3038 },
3039 "status": {
3040 "code": 200,
3041 "message": "OK"
3042 },
3043 "body": {
3044 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3045 }
3046 },
3047 "request": {
3048 "headers": {
3049 "content-length": [
3050 "482"
3051 ],
3052 "host": [
3053 "phab.mercurial-scm.org"
3054 ],
3055 "content-type": [
3056 "application/x-www-form-urlencoded"
3057 ],
3058 "user-agent": [
3059 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3060 ],
3061 "accept": [
3062 "application/mercurial-0.1"
3063 ]
3064 },
3065 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3066 "method": "POST",
3067 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22da86a9f3268c2bcd03a0efc5ef8f2171e3e741fc%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22f391f4da4c61a54562f5e3dce83ea7f7bd38be31%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20254%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
3068 }
3069 },
3070 {
3071 "response": {
3072 "headers": {
3073 "date": [
3074 "Tue, 18 Feb 2020 18:43:32 GMT"
3075 ],
3076 "strict-transport-security": [
3077 "max-age=0; includeSubdomains; preload"
3078 ],
3079 "x-frame-options": [
3080 "Deny"
3081 ],
3082 "content-type": [
3083 "application/json"
3084 ],
3085 "transfer-encoding": [
3086 "chunked"
3087 ],
3088 "x-xss-protection": [
3089 "1; mode=block"
3090 ],
3091 "expires": [
3092 "Sat, 01 Jan 2000 00:00:00 GMT"
3093 ],
3094 "x-content-type-options": [
3095 "nosniff"
3096 ],
3097 "referrer-policy": [
3098 "no-referrer"
3099 ],
3100 "server": [
3101 "Apache/2.4.10 (Debian)"
3102 ],
3103 "cache-control": [
3104 "no-store"
3105 ]
3106 },
3107 "status": {
3108 "code": 200,
3109 "message": "OK"
3110 },
3111 "body": {
3112 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3113 }
3114 },
3115 "request": {
3116 "headers": {
3117 "content-length": [
3118 "594"
3119 ],
3120 "host": [
3121 "phab.mercurial-scm.org"
3122 ],
3123 "content-type": [
3124 "application/x-www-form-urlencoded"
3125 ],
3126 "user-agent": [
3127 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3128 ],
3129 "accept": [
3130 "application/mercurial-0.1"
3131 ]
3132 },
3133 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3134 "method": "POST",
3135 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22da86a9f3268c2bcd03a0efc5ef8f2171e3e741fc%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22da86a9f3268c2bcd03a0efc5ef8f2171e3e741fc%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22f391f4da4c61a54562f5e3dce83ea7f7bd38be31%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20254%2C+%22name%22%3A+%22local%3Acommits%22%7D"
3136 }
3137 },
3138 {
3139 "response": {
3140 "headers": {
3141 "date": [
3142 "Tue, 18 Feb 2020 18:43:33 GMT"
3143 ],
3144 "strict-transport-security": [
3145 "max-age=0; includeSubdomains; preload"
3146 ],
3147 "x-frame-options": [
3148 "Deny"
3149 ],
3150 "content-type": [
3151 "application/json"
3152 ],
3153 "transfer-encoding": [
3154 "chunked"
3155 ],
3156 "x-xss-protection": [
3157 "1; mode=block"
3158 ],
3159 "expires": [
3160 "Sat, 01 Jan 2000 00:00:00 GMT"
3161 ],
3162 "x-content-type-options": [
3163 "nosniff"
3164 ],
3165 "referrer-policy": [
3166 "no-referrer"
3167 ],
3168 "server": [
3169 "Apache/2.4.10 (Debian)"
3170 ],
3171 "cache-control": [
3172 "no-store"
3173 ]
3174 },
3175 "status": {
3176 "code": 200,
3177 "message": "OK"
3178 },
3179 "body": {
3180 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3181 }
3182 },
3183 "request": {
3184 "headers": {
3185 "content-length": [
3186 "482"
3187 ],
3188 "host": [
3189 "phab.mercurial-scm.org"
3190 ],
3191 "content-type": [
3192 "application/x-www-form-urlencoded"
3193 ],
3194 "user-agent": [
3195 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3196 ],
3197 "accept": [
3198 "application/mercurial-0.1"
3199 ]
3200 },
3201 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3202 "method": "POST",
3203 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22003ffc16ba6612afd72ba30ad5054ee96316e149%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22da86a9f3268c2bcd03a0efc5ef8f2171e3e741fc%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20255%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
3204 }
3205 },
3206 {
3207 "response": {
3208 "headers": {
3209 "date": [
3210 "Tue, 18 Feb 2020 18:43:33 GMT"
3211 ],
3212 "strict-transport-security": [
3213 "max-age=0; includeSubdomains; preload"
3214 ],
3215 "x-frame-options": [
3216 "Deny"
3217 ],
3218 "content-type": [
3219 "application/json"
3220 ],
3221 "transfer-encoding": [
3222 "chunked"
3223 ],
3224 "x-xss-protection": [
3225 "1; mode=block"
3226 ],
3227 "expires": [
3228 "Sat, 01 Jan 2000 00:00:00 GMT"
3229 ],
3230 "x-content-type-options": [
3231 "nosniff"
3232 ],
3233 "referrer-policy": [
3234 "no-referrer"
3235 ],
3236 "server": [
3237 "Apache/2.4.10 (Debian)"
3238 ],
3239 "cache-control": [
3240 "no-store"
3241 ]
3242 },
3243 "status": {
3244 "code": 200,
3245 "message": "OK"
3246 },
3247 "body": {
3248 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3249 }
3250 },
3251 "request": {
3252 "headers": {
3253 "content-length": [
3254 "594"
3255 ],
3256 "host": [
3257 "phab.mercurial-scm.org"
3258 ],
3259 "content-type": [
3260 "application/x-www-form-urlencoded"
3261 ],
3262 "user-agent": [
3263 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3264 ],
3265 "accept": [
3266 "application/mercurial-0.1"
3267 ]
3268 },
3269 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3270 "method": "POST",
3271 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22003ffc16ba6612afd72ba30ad5054ee96316e149%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22003ffc16ba6612afd72ba30ad5054ee96316e149%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22da86a9f3268c2bcd03a0efc5ef8f2171e3e741fc%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20255%2C+%22name%22%3A+%22local%3Acommits%22%7D"
3272 }
3273 },
3274 {
3275 "response": {
3276 "headers": {
3277 "date": [
3278 "Tue, 18 Feb 2020 18:43:33 GMT"
3279 ],
3280 "strict-transport-security": [
3281 "max-age=0; includeSubdomains; preload"
3282 ],
3283 "x-frame-options": [
3284 "Deny"
3285 ],
3286 "content-type": [
3287 "application/json"
3288 ],
3289 "transfer-encoding": [
3290 "chunked"
3291 ],
3292 "x-xss-protection": [
3293 "1; mode=block"
3294 ],
3295 "expires": [
3296 "Sat, 01 Jan 2000 00:00:00 GMT"
3297 ],
3298 "x-content-type-options": [
3299 "nosniff"
3300 ],
3301 "referrer-policy": [
3302 "no-referrer"
3303 ],
3304 "server": [
3305 "Apache/2.4.10 (Debian)"
3306 ],
3307 "cache-control": [
3308 "no-store"
3309 ]
3310 },
3311 "status": {
3312 "code": 200,
3313 "message": "OK"
3314 },
3315 "body": {
3316 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3317 }
3318 },
3319 "request": {
3320 "headers": {
3321 "content-length": [
3322 "482"
3323 ],
3324 "host": [
3325 "phab.mercurial-scm.org"
3326 ],
3327 "content-type": [
3328 "application/x-www-form-urlencoded"
3329 ],
3330 "user-agent": [
3331 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3332 ],
3333 "accept": [
3334 "application/mercurial-0.1"
3335 ]
3336 },
3337 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3338 "method": "POST",
3339 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%2213bd750c36fadfe118f04a1f9639300393791443%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22003ffc16ba6612afd72ba30ad5054ee96316e149%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+20256%2C+%22name%22%3A+%22hg%3Ameta%22%7D"
3340 }
3341 },
3342 {
3343 "response": {
3344 "headers": {
3345 "date": [
3346 "Tue, 18 Feb 2020 18:43:34 GMT"
3347 ],
3348 "strict-transport-security": [
3349 "max-age=0; includeSubdomains; preload"
3350 ],
3351 "x-frame-options": [
3352 "Deny"
3353 ],
3354 "content-type": [
3355 "application/json"
3356 ],
3357 "transfer-encoding": [
3358 "chunked"
3359 ],
3360 "x-xss-protection": [
3361 "1; mode=block"
3362 ],
3363 "expires": [
3364 "Sat, 01 Jan 2000 00:00:00 GMT"
3365 ],
3366 "x-content-type-options": [
3367 "nosniff"
3368 ],
3369 "referrer-policy": [
3370 "no-referrer"
3371 ],
3372 "server": [
3373 "Apache/2.4.10 (Debian)"
3374 ],
3375 "cache-control": [
3376 "no-store"
3377 ]
3378 },
3379 "status": {
3380 "code": 200,
3381 "message": "OK"
3382 },
3383 "body": {
3384 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
3385 }
3386 },
3387 "request": {
3388 "headers": {
3389 "content-length": [
3390 "594"
3391 ],
3392 "host": [
3393 "phab.mercurial-scm.org"
3394 ],
3395 "content-type": [
3396 "application/x-www-form-urlencoded"
3397 ],
3398 "user-agent": [
3399 "mercurial/proto-1.0 (Mercurial 5.3+213-eda4eceb98c0+20200218)"
3400 ],
3401 "accept": [
3402 "application/mercurial-0.1"
3403 ]
3404 },
3405 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
3406 "method": "POST",
3407 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%2213bd750c36fadfe118f04a1f9639300393791443%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%2213bd750c36fadfe118f04a1f9639300393791443%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22003ffc16ba6612afd72ba30ad5054ee96316e149%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+20256%2C+%22name%22%3A+%22local%3Acommits%22%7D"
3408 }
3409 }
3410 ]
3411 } No newline at end of file
@@ -1,1797 +1,1797 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config on the server side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
93 93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 95 # be specifying the version(s) of Mercurial they are tested with, or
96 96 # leave the attribute unspecified.
97 97 testedwith = b'ships-with-hg-core'
98 98
99 99 eh = exthelper.exthelper()
100 100
101 101 cmdtable = eh.cmdtable
102 102 command = eh.command
103 103 configtable = eh.configtable
104 104 templatekeyword = eh.templatekeyword
105 105 uisetup = eh.finaluisetup
106 106
107 107 # developer config: phabricator.batchsize
108 108 eh.configitem(
109 109 b'phabricator', b'batchsize', default=12,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'callsign', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabricator', b'curlcmd', default=None,
116 116 )
117 117 # developer config: phabricator.repophid
118 118 eh.configitem(
119 119 b'phabricator', b'repophid', default=None,
120 120 )
121 121 eh.configitem(
122 122 b'phabricator', b'url', default=None,
123 123 )
124 124 eh.configitem(
125 125 b'phabsend', b'confirm', default=False,
126 126 )
127 127
128 128 colortable = {
129 129 b'phabricator.action.created': b'green',
130 130 b'phabricator.action.skipped': b'magenta',
131 131 b'phabricator.action.updated': b'magenta',
132 132 b'phabricator.desc': b'',
133 133 b'phabricator.drev': b'bold',
134 134 b'phabricator.node': b'',
135 135 b'phabricator.status.abandoned': b'magenta dim',
136 136 b'phabricator.status.accepted': b'green bold',
137 137 b'phabricator.status.closed': b'green',
138 138 b'phabricator.status.needsreview': b'yellow',
139 139 b'phabricator.status.needsrevision': b'red',
140 140 b'phabricator.status.changesplanned': b'red',
141 141 }
142 142
143 143 _VCR_FLAGS = [
144 144 (
145 145 b'',
146 146 b'test-vcr',
147 147 b'',
148 148 _(
149 149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 150 b', otherwise will mock all http requests using the specified vcr file.'
151 151 b' (ADVANCED)'
152 152 ),
153 153 ),
154 154 ]
155 155
156 156
157 157 @eh.wrapfunction(localrepo, "loadhgrc")
158 158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 160 """
161 161 result = False
162 162 arcconfig = {}
163 163
164 164 try:
165 165 # json.loads only accepts bytes from 3.6+
166 166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 167 # json.loads only returns unicode strings
168 168 arcconfig = pycompat.rapply(
169 169 lambda x: encoding.unitolocal(x)
170 170 if isinstance(x, pycompat.unicode)
171 171 else x,
172 172 pycompat.json_loads(rawparams),
173 173 )
174 174
175 175 result = True
176 176 except ValueError:
177 177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 178 except IOError:
179 179 pass
180 180
181 181 cfg = util.sortdict()
182 182
183 183 if b"repository.callsign" in arcconfig:
184 184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
185 185
186 186 if b"phabricator.uri" in arcconfig:
187 187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188 188
189 189 if cfg:
190 190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
191 191
192 192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
193 193
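# --- editor's illustration (not part of phabricator.py) ---------------------
# _loadhgrc() above copies two keys from a repository's ``.arcconfig`` into
# the [phabricator] config section.  A minimal file it would understand might
# look like this (the values mirror the placeholders from the module
# docstring, not a real install):
#
#     {
#       "phabricator.uri": "https://phab.example.com/",
#       "repository.callsign": "FOO"
#     }
#
# which is equivalent to setting ``phabricator.url`` and
# ``phabricator.callsign`` in an hgrc.
# -----------------------------------------------------------------------------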
194 194
195 195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
196 196 fullflags = flags + _VCR_FLAGS
197 197
198 198 def hgmatcher(r1, r2):
199 199 if r1.uri != r2.uri or r1.method != r2.method:
200 200 return False
201 201 r1params = util.urlreq.parseqs(r1.body)
202 202 r2params = util.urlreq.parseqs(r2.body)
203 203 for key in r1params:
204 204 if key not in r2params:
205 205 return False
206 206 value = r1params[key][0]
207 207 # we want to compare json payloads without worrying about ordering
208 208 if value.startswith(b'{') and value.endswith(b'}'):
209 209 r1json = pycompat.json_loads(value)
210 210 r2json = pycompat.json_loads(r2params[key][0])
211 211 if r1json != r2json:
212 212 return False
213 213 elif r2params[key][0] != value:
214 214 return False
215 215 return True
216 216
217 217 def sanitiserequest(request):
218 218 request.body = re.sub(
219 219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
220 220 )
221 221 return request
222 222
223 223 def sanitiseresponse(response):
224 224 if 'set-cookie' in response['headers']:
225 225 del response['headers']['set-cookie']
226 226 return response
227 227
228 228 def decorate(fn):
229 229 def inner(*args, **kwargs):
230 230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
231 231 if cassette:
232 232 import hgdemandimport
233 233
234 234 with hgdemandimport.deactivated():
235 235 import vcr as vcrmod
236 236 import vcr.stubs as stubs
237 237
238 238 vcr = vcrmod.VCR(
239 239 serializer='json',
240 240 before_record_request=sanitiserequest,
241 241 before_record_response=sanitiseresponse,
242 242 custom_patches=[
243 243 (
244 244 urlmod,
245 245 'httpconnection',
246 246 stubs.VCRHTTPConnection,
247 247 ),
248 248 (
249 249 urlmod,
250 250 'httpsconnection',
251 251 stubs.VCRHTTPSConnection,
252 252 ),
253 253 ],
254 254 )
255 255 vcr.register_matcher('hgmatcher', hgmatcher)
256 256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
257 257 return fn(*args, **kwargs)
258 258 return fn(*args, **kwargs)
259 259
260 260 inner.__name__ = fn.__name__
261 261 inner.__doc__ = fn.__doc__
262 262 return command(
263 263 name,
264 264 fullflags,
265 265 spec,
266 266 helpcategory=helpcategory,
267 267 optionalrepo=optionalrepo,
268 268 )(inner)
269 269
270 270 return decorate
271 271
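# --- editor's note (not part of phabricator.py) ------------------------------
# Commands declared through vcrcommand() (for example debugcallconduit below)
# gain a ``--test-vcr PATH`` option: if PATH does not exist, the real HTTP
# traffic is recorded into a JSON cassette like the one reproduced earlier in
# this changeset; if it does exist, the recorded interactions are replayed
# instead of contacting the server.  sanitiserequest() is why every recorded
# request body carries the placeholder token "cli-hahayouwish" rather than a
# real conduit token.
# -----------------------------------------------------------------------------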
272 272
273 273 def urlencodenested(params):
274 274 """like urlencode, but works with nested parameters.
275 275
276 276 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
277 277 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
278 278 urlencode. Note: the encoding is consistent with PHP's http_build_query.
279 279 """
280 280 flatparams = util.sortdict()
281 281
282 282 def process(prefix, obj):
283 283 if isinstance(obj, bool):
284 284 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
285 285 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
286 286 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
287 287 if items is None:
288 288 flatparams[prefix] = obj
289 289 else:
290 290 for k, v in items(obj):
291 291 if prefix:
292 292 process(b'%s[%s]' % (prefix, k), v)
293 293 else:
294 294 process(k, v)
295 295
296 296 process(b'', params)
297 297 return util.urlreq.urlencode(flatparams)
298 298
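# --- editor's illustration (standalone sketch, not part of phabricator.py) ---
# A minimal, self-contained re-implementation of the PHP ``http_build_query``
# style flattening that urlencodenested() above performs, using only the
# standard library.  The names ``flatten`` and ``flat`` are hypothetical, and
# the boolean special-casing of the real helper is omitted; it is meant to be
# run on its own, not spliced into this module.
from urllib.parse import urlencode

def flatten(prefix, obj, out):
    # lists become a[0], a[1], ...; dicts become d[e]; scalars pass through
    if isinstance(obj, list):
        items = [(str(i), v) for i, v in enumerate(obj)]
    elif isinstance(obj, dict):
        items = list(obj.items())
    else:
        out[prefix] = obj
        return
    for key, value in items:
        flatten('%s[%s]' % (prefix, key) if prefix else key, value, out)

flat = {}
flatten('', {'a': ['b', 'c'], 'd': {'e': 'f'}}, flat)
# prints a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f, matching the docstring example above
print(urlencode(flat))
# -----------------------------------------------------------------------------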
299 299
300 300 def readurltoken(ui):
301 301 """return conduit url, token and make sure they exist
302 302
303 303 Currently these are read from the [auth] config section. In the future, it might
304 304 make sense to read from .arcconfig and .arcrc as well.
305 305 """
306 306 url = ui.config(b'phabricator', b'url')
307 307 if not url:
308 308 raise error.Abort(
309 309 _(b'config %s.%s is required') % (b'phabricator', b'url')
310 310 )
311 311
312 312 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
313 313 token = None
314 314
315 315 if res:
316 316 group, auth = res
317 317
318 318 ui.debug(b"using auth.%s.* for authentication\n" % group)
319 319
320 320 token = auth.get(b'phabtoken')
321 321
322 322 if not token:
323 323 raise error.Abort(
324 324 _(b'Can\'t find conduit token associated to %s') % (url,)
325 325 )
326 326
327 327 return url, token
328 328
329 329
330 330 def callconduit(ui, name, params):
331 331 """call Conduit API, params is a dict. return json.loads result, or None"""
332 332 host, token = readurltoken(ui)
333 333 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
334 334 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
335 335 params = params.copy()
336 336 params[b'__conduit__'] = {
337 337 b'token': token,
338 338 }
339 339 rawdata = {
340 340 b'params': templatefilters.json(params),
341 341 b'output': b'json',
342 342 b'__conduit__': 1,
343 343 }
344 344 data = urlencodenested(rawdata)
345 345 curlcmd = ui.config(b'phabricator', b'curlcmd')
346 346 if curlcmd:
347 347 sin, sout = procutil.popen2(
348 348 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
349 349 )
350 350 sin.write(data)
351 351 sin.close()
352 352 body = sout.read()
353 353 else:
354 354 urlopener = urlmod.opener(ui, authinfo)
355 355 request = util.urlreq.request(pycompat.strurl(url), data=data)
356 356 with contextlib.closing(urlopener.open(request)) as rsp:
357 357 body = rsp.read()
358 358 ui.debug(b'Conduit Response: %s\n' % body)
359 359 parsed = pycompat.rapply(
360 360 lambda x: encoding.unitolocal(x)
361 361 if isinstance(x, pycompat.unicode)
362 362 else x,
363 363 # json.loads only accepts bytes from py3.6+
364 364 pycompat.json_loads(encoding.unifromlocal(body)),
365 365 )
366 366 if parsed.get(b'error_code'):
367 367 msg = _(b'Conduit Error (%s): %s') % (
368 368 parsed[b'error_code'],
369 369 parsed[b'error_info'],
370 370 )
371 371 raise error.Abort(msg)
372 372 return parsed[b'result']
373 373
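# --- editor's illustration (not part of phabricator.py) ----------------------
# Every Conduit call in the recorded cassette above funnels through
# callconduit().  For instance, the differential.query request near the end of
# the recording corresponds to a call roughly like:
#
#     drevs = callconduit(repo.ui, b'differential.query',
#                         {b'ids': [8128, 8129, 8130, 8131, 8132]})
#
# and the decoded b'result' list of revision dictionaries is what gets handed
# back to the caller.
# -----------------------------------------------------------------------------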
374 374
375 375 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
376 376 def debugcallconduit(ui, repo, name):
377 377 """call Conduit API
378 378
379 379 Call parameters are read from stdin as a JSON blob. Result will be written
380 380 to stdout as a JSON blob.
381 381 """
382 382 # json.loads only accepts bytes from 3.6+
383 383 rawparams = encoding.unifromlocal(ui.fin.read())
384 384 # json.loads only returns unicode strings
385 385 params = pycompat.rapply(
386 386 lambda x: encoding.unitolocal(x)
387 387 if isinstance(x, pycompat.unicode)
388 388 else x,
389 389 pycompat.json_loads(rawparams),
390 390 )
391 391 # json.dumps only accepts unicode strings
392 392 result = pycompat.rapply(
393 393 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
394 394 callconduit(ui, name, params),
395 395 )
396 396 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
397 397 ui.write(b'%s\n' % encoding.unitolocal(s))
398 398
399 399
400 400 def getrepophid(repo):
401 401 """given callsign, return repository PHID or None"""
402 402 # developer config: phabricator.repophid
403 403 repophid = repo.ui.config(b'phabricator', b'repophid')
404 404 if repophid:
405 405 return repophid
406 406 callsign = repo.ui.config(b'phabricator', b'callsign')
407 407 if not callsign:
408 408 return None
409 409 query = callconduit(
410 410 repo.ui,
411 411 b'diffusion.repository.search',
412 412 {b'constraints': {b'callsigns': [callsign]}},
413 413 )
414 414 if len(query[b'data']) == 0:
415 415 return None
416 416 repophid = query[b'data'][0][b'phid']
417 417 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
418 418 return repophid
419 419
420 420
421 421 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
422 422 _differentialrevisiondescre = re.compile(
423 423 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
424 424 )
425 425
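Illustrative matches for the two patterns above, with an example host and id:

    # _differentialrevisiontagre matches local tags such as b'D1234'
    m = _differentialrevisiondescre.search(
        b'Differential Revision: https://phab.example.com/D1234'
    )
    # m.group('url') == b'https://phab.example.com/D1234'
    # m.group('id') == b'1234'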
426 426
427 427 def getoldnodedrevmap(repo, nodelist):
428 428 """find previous nodes that have been sent to Phabricator
429 429
430 430 return {node: (oldnode, Differential diff, Differential Revision ID)}
431 431 for node in nodelist with known previously sent versions, or associated
432 432 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
433 433 be ``None``.
434 434
435 435 Examines commit messages like "Differential Revision:" to get the
436 436 association information.
437 437
438 438 If no such commit message line is found, examines all precursors and their
439 439 tags. Tags in the form "D1234" are considered a match, and the node with
440 440 that tag, along with the number after "D" (ex. 1234), will be returned.
441 441
442 442 The ``old node``, if not None, is guaranteed to be the last diff of the
443 443 corresponding Differential Revision, and to exist in the repo.
444 444 """
445 445 unfi = repo.unfiltered()
446 446 has_node = unfi.changelog.index.has_node
447 447
448 448 result = {} # {node: (oldnode?, lastdiff?, drev)}
449 449 toconfirm = {} # {node: (force, {precnode}, drev)}
450 450 for node in nodelist:
451 451 ctx = unfi[node]
452 452 # For tags like "D123", put them into "toconfirm" to verify later
453 453 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
454 454 for n in precnodes:
455 455 if has_node(n):
456 456 for tag in unfi.nodetags(n):
457 457 m = _differentialrevisiontagre.match(tag)
458 458 if m:
459 459 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
460 460 break
461 461 else:
462 462 continue # move to next predecessor
463 463 break # found a tag, stop
464 464 else:
465 465 # Check commit message
466 466 m = _differentialrevisiondescre.search(ctx.description())
467 467 if m:
468 468 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
469 469
470 470 # Double check if tags are genuine by collecting all old nodes from
471 471 # Phabricator, and expecting the precursors to overlap with them.
472 472 if toconfirm:
473 473 drevs = [drev for force, precs, drev in toconfirm.values()]
474 474 alldiffs = callconduit(
475 475 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
476 476 )
477 477 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
478 478 for newnode, (force, precset, drev) in toconfirm.items():
479 479 diffs = [
480 480 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
481 481 ]
482 482
483 483 # "precursors" as known by Phabricator
484 484 phprecset = set(getnode(d) for d in diffs)
485 485
486 486 # Ignore if precursors (Phabricator and local repo) do not overlap,
487 487 # and force is not set (when commit message says nothing)
488 488 if not force and not bool(phprecset & precset):
489 489 tagname = b'D%d' % drev
490 490 tags.tag(
491 491 repo,
492 492 tagname,
493 493 nullid,
494 494 message=None,
495 495 user=None,
496 496 date=None,
497 497 local=True,
498 498 )
499 499 unfi.ui.warn(
500 500 _(
501 501 b'D%d: local tag removed - does not match '
502 502 b'Differential history\n'
503 503 )
504 504 % drev
505 505 )
506 506 continue
507 507
508 508 # Find the last node using Phabricator metadata, and make sure it
509 509 # exists in the repo
510 510 oldnode = lastdiff = None
511 511 if diffs:
512 512 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
513 513 oldnode = getnode(lastdiff)
514 514 if oldnode and not has_node(oldnode):
515 515 oldnode = None
516 516
517 517 result[newnode] = (oldnode, lastdiff, drev)
518 518
519 519 return result
520 520
521 521
522 522 def getdrevmap(repo, revs):
523 523 """Return a dict mapping each rev in `revs` to their Differential Revision
524 524 ID or None.
525 525 """
526 526 result = {}
527 527 for rev in revs:
528 528 result[rev] = None
529 529 ctx = repo[rev]
530 530 # Check commit message
531 531 m = _differentialrevisiondescre.search(ctx.description())
532 532 if m:
533 533 result[rev] = int(m.group('id'))
534 534 continue
535 535 # Check tags
536 536 for tag in repo.nodetags(ctx.node()):
537 537 m = _differentialrevisiontagre.match(tag)
538 538 if m:
539 539 result[rev] = int(m.group(1))
540 540 break
541 541
542 542 return result
543 543
544 544
545 545 def getdiff(ctx, diffopts):
546 546 """plain-text diff without header (user, commit message, etc)"""
547 547 output = util.stringio()
548 548 for chunk, _label in patch.diffui(
549 549 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
550 550 ):
551 551 output.write(chunk)
552 552 return output.getvalue()
553 553
554 554
555 555 class DiffChangeType(object):
556 556 ADD = 1
557 557 CHANGE = 2
558 558 DELETE = 3
559 559 MOVE_AWAY = 4
560 560 COPY_AWAY = 5
561 561 MOVE_HERE = 6
562 562 COPY_HERE = 7
563 563 MULTICOPY = 8
564 564
565 565
566 566 class DiffFileType(object):
567 567 TEXT = 1
568 568 IMAGE = 2
569 569 BINARY = 3
570 570
571 571
572 572 @attr.s
573 573 class phabhunk(dict):
574 574 """Represents a Differential hunk, which is owned by a Differential change
575 575 """
576 576
577 577 oldOffset = attr.ib(default=0) # camelcase-required
578 578 oldLength = attr.ib(default=0) # camelcase-required
579 579 newOffset = attr.ib(default=0) # camelcase-required
580 580 newLength = attr.ib(default=0) # camelcase-required
581 581 corpus = attr.ib(default='')
582 582 # These get added to the phabchange's equivalents
583 583 addLines = attr.ib(default=0) # camelcase-required
584 584 delLines = attr.ib(default=0) # camelcase-required
585 585
586 586
587 587 @attr.s
588 588 class phabchange(object):
589 589 """Represents a Differential change, owns Differential hunks and owned by a
590 590 Differential diff. Each one represents one file in a diff.
591 591 """
592 592
593 593 currentPath = attr.ib(default=None) # camelcase-required
594 594 oldPath = attr.ib(default=None) # camelcase-required
595 595 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
596 596 metadata = attr.ib(default=attr.Factory(dict))
597 597 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
598 598 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
599 599 type = attr.ib(default=DiffChangeType.CHANGE)
600 600 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
601 601 commitHash = attr.ib(default=None) # camelcase-required
602 602 addLines = attr.ib(default=0) # camelcase-required
603 603 delLines = attr.ib(default=0) # camelcase-required
604 604 hunks = attr.ib(default=attr.Factory(list))
605 605
606 606 def copynewmetadatatoold(self):
607 607 for key in list(self.metadata.keys()):
608 608 newkey = key.replace(b'new:', b'old:')
609 609 self.metadata[newkey] = self.metadata[key]
610 610
611 611 def addoldmode(self, value):
612 612 self.oldProperties[b'unix:filemode'] = value
613 613
614 614 def addnewmode(self, value):
615 615 self.newProperties[b'unix:filemode'] = value
616 616
617 617 def addhunk(self, hunk):
618 618 if not isinstance(hunk, phabhunk):
619 619 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
620 620 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
621 621 # It's useful to include these stats since the Phab web UI shows them,
622 622 # and uses them to estimate how large a change a Revision is. Also used
623 623 # in email subjects for the [+++--] bit.
624 624 self.addLines += hunk.addLines
625 625 self.delLines += hunk.delLines
626 626
627 627
628 628 @attr.s
629 629 class phabdiff(object):
630 630 """Represents a Differential diff, owns Differential changes. Corresponds
631 631 to a commit.
632 632 """
633 633
634 634 # Doesn't seem to be any reason to send this (output of uname -n)
635 635 sourceMachine = attr.ib(default=b'') # camelcase-required
636 636 sourcePath = attr.ib(default=b'/') # camelcase-required
637 637 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
638 638 sourceControlPath = attr.ib(default=b'/') # camelcase-required
639 639 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
640 640 branch = attr.ib(default=b'default')
641 641 bookmark = attr.ib(default=None)
642 642 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
643 643 lintStatus = attr.ib(default=b'none') # camelcase-required
644 644 unitStatus = attr.ib(default=b'none') # camelcase-required
645 645 changes = attr.ib(default=attr.Factory(dict))
646 646 repositoryPHID = attr.ib(default=None) # camelcase-required
647 647
648 648 def addchange(self, change):
649 649 if not isinstance(change, phabchange):
650 650 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
651 651 self.changes[change.currentPath] = pycompat.byteskwargs(
652 652 attr.asdict(change)
653 653 )
654 654
655 655
656 656 def maketext(pchange, ctx, fname):
657 657 """populate the phabchange for a text file"""
658 658 repo = ctx.repo()
659 659 fmatcher = match.exact([fname])
660 660 diffopts = mdiff.diffopts(git=True, context=32767)
661 661 _pfctx, _fctx, header, fhunks = next(
662 662 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
663 663 )
664 664
665 665 for fhunk in fhunks:
666 666 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
667 667 corpus = b''.join(lines[1:])
668 668 shunk = list(header)
669 669 shunk.extend(lines)
670 670 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
671 671 patch.diffstatdata(util.iterlines(shunk))
672 672 )
673 673 pchange.addhunk(
674 674 phabhunk(
675 675 oldOffset,
676 676 oldLength,
677 677 newOffset,
678 678 newLength,
679 679 corpus,
680 680 addLines,
681 681 delLines,
682 682 )
683 683 )
684 684
685 685
686 686 def uploadchunks(fctx, fphid):
687 687 """upload large binary files as separate chunks.
688 688 Phab requests chunking for files over 8MiB, and splits them into 4MiB chunks
689 689 """
690 690 ui = fctx.repo().ui
691 691 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
692 692 with ui.makeprogress(
693 693 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
694 694 ) as progress:
695 695 for chunk in chunks:
696 696 progress.increment()
697 697 if chunk[b'complete']:
698 698 continue
699 699 bstart = int(chunk[b'byteStart'])
700 700 bend = int(chunk[b'byteEnd'])
701 701 callconduit(
702 702 ui,
703 703 b'file.uploadchunk',
704 704 {
705 705 b'filePHID': fphid,
706 706 b'byteStart': bstart,
707 707 b'data': base64.b64encode(fctx.data()[bstart:bend]),
708 708 b'dataEncoding': b'base64',
709 709 },
710 710 )
711 711
712 712
713 713 def uploadfile(fctx):
714 714 """upload binary files to Phabricator"""
715 715 repo = fctx.repo()
716 716 ui = repo.ui
717 717 fname = fctx.path()
718 718 size = fctx.size()
719 719 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
720 720
721 721 # an allocate call is required first to see if an upload is even required
722 722 # (Phab might already have it) and to determine if chunking is needed
723 723 allocateparams = {
724 724 b'name': fname,
725 725 b'contentLength': size,
726 726 b'contentHash': fhash,
727 727 }
728 728 filealloc = callconduit(ui, b'file.allocate', allocateparams)
729 729 fphid = filealloc[b'filePHID']
730 730
731 731 if filealloc[b'upload']:
732 732 ui.write(_(b'uploading %s\n') % bytes(fctx))
733 733 if not fphid:
734 734 uploadparams = {
735 735 b'name': fname,
736 736 b'data_base64': base64.b64encode(fctx.data()),
737 737 }
738 738 fphid = callconduit(ui, b'file.upload', uploadparams)
739 739 else:
740 740 uploadchunks(fctx, fphid)
741 741 else:
742 742 ui.debug(b'server already has %s\n' % bytes(fctx))
743 743
744 744 if not fphid:
745 745 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
746 746
747 747 return fphid
748 748
749 749
750 750 def addoldbinary(pchange, fctx):
751 751 """add the metadata for the previous version of a binary file to the
752 752 phabchange for the new version
753 753 """
754 754 oldfctx = fctx.p1()
755 755 if fctx.cmp(oldfctx):
756 756 # Files differ, add the old one
757 757 pchange.metadata[b'old:file:size'] = oldfctx.size()
758 758 mimeguess, _enc = mimetypes.guess_type(
759 759 encoding.unifromlocal(oldfctx.path())
760 760 )
761 761 if mimeguess:
762 762 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
763 763 mimeguess
764 764 )
765 765 fphid = uploadfile(oldfctx)
766 766 pchange.metadata[b'old:binary-phid'] = fphid
767 767 else:
768 768 # If it's left as IMAGE/BINARY, the web UI might try to display it
769 769 pchange.fileType = DiffFileType.TEXT
770 770 pchange.copynewmetadatatoold()
771 771
772 772
773 773 def makebinary(pchange, fctx):
774 774 """populate the phabchange for a binary file"""
775 775 pchange.fileType = DiffFileType.BINARY
776 776 fphid = uploadfile(fctx)
777 777 pchange.metadata[b'new:binary-phid'] = fphid
778 778 pchange.metadata[b'new:file:size'] = fctx.size()
779 779 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
780 780 if mimeguess:
781 781 mimeguess = pycompat.bytestr(mimeguess)
782 782 pchange.metadata[b'new:file:mime-type'] = mimeguess
783 783 if mimeguess.startswith(b'image/'):
784 784 pchange.fileType = DiffFileType.IMAGE
785 785
786 786
787 787 # Copied from mercurial/patch.py
788 788 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
789 789
790 790
791 791 def notutf8(fctx):
792 792 """detect non-UTF-8 text files since Phabricator requires them to be marked
793 793 as binary
794 794 """
795 795 try:
796 796 fctx.data().decode('utf-8')
797 797 if fctx.parents():
798 798 fctx.p1().data().decode('utf-8')
799 799 return False
800 800 except UnicodeDecodeError:
801 801 fctx.repo().ui.write(
802 802 _(b'file %s detected as non-UTF-8, marked as binary\n')
803 803 % fctx.path()
804 804 )
805 805 return True
806 806
807 807
808 808 def addremoved(pdiff, ctx, removed):
809 809 """add removed files to the phabdiff. Shouldn't include moves"""
810 810 for fname in removed:
811 811 pchange = phabchange(
812 812 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
813 813 )
814 814 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
815 815 fctx = ctx.p1()[fname]
816 816 if not (fctx.isbinary() or notutf8(fctx)):
817 817 maketext(pchange, ctx, fname)
818 818
819 819 pdiff.addchange(pchange)
820 820
821 821
822 822 def addmodified(pdiff, ctx, modified):
823 823 """add modified files to the phabdiff"""
824 824 for fname in modified:
825 825 fctx = ctx[fname]
826 826 pchange = phabchange(currentPath=fname, oldPath=fname)
827 827 filemode = gitmode[ctx[fname].flags()]
828 828 originalmode = gitmode[ctx.p1()[fname].flags()]
829 829 if filemode != originalmode:
830 830 pchange.addoldmode(originalmode)
831 831 pchange.addnewmode(filemode)
832 832
833 833 if fctx.isbinary() or notutf8(fctx):
834 834 makebinary(pchange, fctx)
835 835 addoldbinary(pchange, fctx)
836 836 else:
837 837 maketext(pchange, ctx, fname)
838 838
839 839 pdiff.addchange(pchange)
840 840
841 841
842 842 def addadded(pdiff, ctx, added, removed):
843 843 """add file adds to the phabdiff, both new files and copies/moves"""
844 844 # Keep track of files that've been recorded as moved/copied, so if there are
845 845 # additional copies we can mark them (moves get removed from removed)
846 846 copiedchanges = {}
847 847 movedchanges = {}
848 848 for fname in added:
849 849 fctx = ctx[fname]
850 850 pchange = phabchange(currentPath=fname)
851 851
852 852 filemode = gitmode[ctx[fname].flags()]
853 853 renamed = fctx.renamed()
854 854
855 855 if renamed:
856 856 originalfname = renamed[0]
857 857 originalmode = gitmode[ctx.p1()[originalfname].flags()]
858 858 pchange.oldPath = originalfname
859 859
860 860 if originalfname in removed:
861 861 origpchange = phabchange(
862 862 currentPath=originalfname,
863 863 oldPath=originalfname,
864 864 type=DiffChangeType.MOVE_AWAY,
865 865 awayPaths=[fname],
866 866 )
867 867 movedchanges[originalfname] = origpchange
868 868 removed.remove(originalfname)
869 869 pchange.type = DiffChangeType.MOVE_HERE
870 870 elif originalfname in movedchanges:
871 871 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
872 872 movedchanges[originalfname].awayPaths.append(fname)
873 873 pchange.type = DiffChangeType.COPY_HERE
874 874 else: # pure copy
875 875 if originalfname not in copiedchanges:
876 876 origpchange = phabchange(
877 877 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
878 878 )
879 879 copiedchanges[originalfname] = origpchange
880 880 else:
881 881 origpchange = copiedchanges[originalfname]
882 882 origpchange.awayPaths.append(fname)
883 883 pchange.type = DiffChangeType.COPY_HERE
884 884
885 885 if filemode != originalmode:
886 886 pchange.addoldmode(originalmode)
887 887 pchange.addnewmode(filemode)
888 888 else: # Brand-new file
889 889 pchange.addnewmode(gitmode[fctx.flags()])
890 890 pchange.type = DiffChangeType.ADD
891 891
892 892 if fctx.isbinary() or notutf8(fctx):
893 893 makebinary(pchange, fctx)
894 894 if renamed:
895 addoldbinary(pchange, fctx, originalfname)
895 addoldbinary(pchange, fctx)
896 896 else:
897 897 maketext(pchange, ctx, fname)
898 898
899 899 pdiff.addchange(pchange)
900 900
901 901 for _path, copiedchange in copiedchanges.items():
902 902 pdiff.addchange(copiedchange)
903 903 for _path, movedchange in movedchanges.items():
904 904 pdiff.addchange(movedchange)
905 905
906 906
907 907 def creatediff(ctx):
908 908 """create a Differential Diff"""
909 909 repo = ctx.repo()
910 910 repophid = getrepophid(repo)
911 911 # Create a "Differential Diff" via "differential.creatediff" API
912 912 pdiff = phabdiff(
913 913 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
914 914 branch=b'%s' % ctx.branch(),
915 915 )
916 916 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
917 917 # addadded will remove moved files from removed, so addremoved won't get
918 918 # them
919 919 addadded(pdiff, ctx, added, removed)
920 920 addmodified(pdiff, ctx, modified)
921 921 addremoved(pdiff, ctx, removed)
922 922 if repophid:
923 923 pdiff.repositoryPHID = repophid
924 924 diff = callconduit(
925 925 repo.ui,
926 926 b'differential.creatediff',
927 927 pycompat.byteskwargs(attr.asdict(pdiff)),
928 928 )
929 929 if not diff:
930 930 raise error.Abort(_(b'cannot create diff for %s') % ctx)
931 931 return diff
932 932
933 933
934 934 def writediffproperties(ctx, diff):
935 935 """write metadata to diff so patches could be applied losslessly"""
936 936 # creatediff returns with a diffid but query returns with an id
937 937 diffid = diff.get(b'diffid', diff.get(b'id'))
938 938 params = {
939 939 b'diff_id': diffid,
940 940 b'name': b'hg:meta',
941 941 b'data': templatefilters.json(
942 942 {
943 943 b'user': ctx.user(),
944 944 b'date': b'%d %d' % ctx.date(),
945 945 b'branch': ctx.branch(),
946 946 b'node': ctx.hex(),
947 947 b'parent': ctx.p1().hex(),
948 948 }
949 949 ),
950 950 }
951 951 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
952 952
953 953 params = {
954 954 b'diff_id': diffid,
955 955 b'name': b'local:commits',
956 956 b'data': templatefilters.json(
957 957 {
958 958 ctx.hex(): {
959 959 b'author': stringutil.person(ctx.user()),
960 960 b'authorEmail': stringutil.email(ctx.user()),
961 961 b'time': int(ctx.date()[0]),
962 962 b'commit': ctx.hex(),
963 963 b'parents': [ctx.p1().hex()],
964 964 b'branch': ctx.branch(),
965 965 },
966 966 }
967 967 ),
968 968 }
969 969 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
970 970
971 971
972 972 def createdifferentialrevision(
973 973 ctx,
974 974 revid=None,
975 975 parentrevphid=None,
976 976 oldnode=None,
977 977 olddiff=None,
978 978 actions=None,
979 979 comment=None,
980 980 ):
981 981 """create or update a Differential Revision
982 982
983 983 If revid is None, create a new Differential Revision, otherwise update
984 984 revid. If parentrevphid is not None, set it as a dependency.
985 985
986 986 If oldnode is not None, check if the patch content (without commit message
987 987 and metadata) has changed before creating another diff.
988 988
989 989 If actions is not None, they will be appended to the transaction.
990 990 """
991 991 repo = ctx.repo()
992 992 if oldnode:
993 993 diffopts = mdiff.diffopts(git=True, context=32767)
994 994 oldctx = repo.unfiltered()[oldnode]
995 995 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
996 996 else:
997 997 neednewdiff = True
998 998
999 999 transactions = []
1000 1000 if neednewdiff:
1001 1001 diff = creatediff(ctx)
1002 1002 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1003 1003 if comment:
1004 1004 transactions.append({b'type': b'comment', b'value': comment})
1005 1005 else:
1006 1006 # Even if we don't need to upload a new diff because the patch content
1007 1007 # has not changed, we might still need to update its metadata so
1008 1008 # pushers can know the correct node metadata.
1009 1009 assert olddiff
1010 1010 diff = olddiff
1011 1011 writediffproperties(ctx, diff)
1012 1012
1013 1013 # Set the parent Revision every time, so commit re-ordering is picked-up
1014 1014 if parentrevphid:
1015 1015 transactions.append(
1016 1016 {b'type': b'parents.set', b'value': [parentrevphid]}
1017 1017 )
1018 1018
1019 1019 if actions:
1020 1020 transactions += actions
1021 1021
1022 1022 # Parse commit message and update related fields.
1023 1023 desc = ctx.description()
1024 1024 info = callconduit(
1025 1025 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1026 1026 )
1027 1027 for k, v in info[b'fields'].items():
1028 1028 if k in [b'title', b'summary', b'testPlan']:
1029 1029 transactions.append({b'type': k, b'value': v})
1030 1030
1031 1031 params = {b'transactions': transactions}
1032 1032 if revid is not None:
1033 1033 # Update an existing Differential Revision
1034 1034 params[b'objectIdentifier'] = revid
1035 1035
1036 1036 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1037 1037 if not revision:
1038 1038 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1039 1039
1040 1040 return revision, diff
1041 1041
1042 1042
1043 1043 def userphids(repo, names):
1044 1044 """convert user names to PHIDs"""
1045 1045 names = [name.lower() for name in names]
1046 1046 query = {b'constraints': {b'usernames': names}}
1047 1047 result = callconduit(repo.ui, b'user.search', query)
1048 1048 # A username that is not found is not an error from the API, so check
1049 1049 # whether we have missed some names here.
1050 1050 data = result[b'data']
1051 1051 resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
1052 1052 unresolved = set(names) - resolved
1053 1053 if unresolved:
1054 1054 raise error.Abort(
1055 1055 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1056 1056 )
1057 1057 return [entry[b'phid'] for entry in data]
1058 1058
1059 1059
1060 1060 @vcrcommand(
1061 1061 b'phabsend',
1062 1062 [
1063 1063 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1064 1064 (b'', b'amend', True, _(b'update commit messages')),
1065 1065 (b'', b'reviewer', [], _(b'specify reviewers')),
1066 1066 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1067 1067 (
1068 1068 b'm',
1069 1069 b'comment',
1070 1070 b'',
1071 1071 _(b'add a comment to Revisions with new/updated Diffs'),
1072 1072 ),
1073 1073 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1074 1074 ],
1075 1075 _(b'REV [OPTIONS]'),
1076 1076 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1077 1077 )
1078 1078 def phabsend(ui, repo, *revs, **opts):
1079 1079 """upload changesets to Phabricator
1080 1080
1081 1081 If there are multiple revisions specified, they will be sent as a stack
1082 1082 with a linear dependency relationship using the order specified by the
1083 1083 revset.
1084 1084
1085 1085 When uploading changesets for the first time, local tags will be created to
1086 1086 maintain the association. After the first time, phabsend will check
1087 1087 obsstore and tags information so it can figure out whether to update an
1088 1088 existing Differential Revision, or create a new one.
1089 1089
1090 1090 If --amend is set, update commit messages so they have the
1091 1091 ``Differential Revision`` URL, and remove related tags. This is similar to what
1092 1092 arcanist does, and is preferred in author-push workflows. Otherwise,
1093 1093 use local tags to record the ``Differential Revision`` association.
1094 1094
1095 1095 The --confirm option lets you confirm changesets before sending them. You
1096 1096 can also add the following to your configuration file to make it the default
1097 1097 behaviour::
1098 1098
1099 1099 [phabsend]
1100 1100 confirm = true
1101 1101
1102 1102 phabsend will check obsstore and the above association to decide whether to
1103 1103 update an existing Differential Revision, or create a new one.
1104 1104 """
1105 1105 opts = pycompat.byteskwargs(opts)
1106 1106 revs = list(revs) + opts.get(b'rev', [])
1107 1107 revs = scmutil.revrange(repo, revs)
1108 1108 revs.sort() # ascending order to preserve topological parent/child in phab
1109 1109
1110 1110 if not revs:
1111 1111 raise error.Abort(_(b'phabsend requires at least one changeset'))
1112 1112 if opts.get(b'amend'):
1113 1113 cmdutil.checkunfinished(repo)
1114 1114
1115 1115 # {newnode: (oldnode, olddiff, olddrev)}
1116 1116 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1117 1117
1118 1118 confirm = ui.configbool(b'phabsend', b'confirm')
1119 1119 confirm |= bool(opts.get(b'confirm'))
1120 1120 if confirm:
1121 1121 confirmed = _confirmbeforesend(repo, revs, oldmap)
1122 1122 if not confirmed:
1123 1123 raise error.Abort(_(b'phabsend cancelled'))
1124 1124
1125 1125 actions = []
1126 1126 reviewers = opts.get(b'reviewer', [])
1127 1127 blockers = opts.get(b'blocker', [])
1128 1128 phids = []
1129 1129 if reviewers:
1130 1130 phids.extend(userphids(repo, reviewers))
1131 1131 if blockers:
1132 1132 phids.extend(
1133 1133 map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
1134 1134 )
1135 1135 if phids:
1136 1136 actions.append({b'type': b'reviewers.add', b'value': phids})
1137 1137
1138 1138 drevids = [] # [int]
1139 1139 diffmap = {} # {newnode: diff}
1140 1140
1141 1141 # Send patches one by one so we know their Differential Revision PHIDs and
1142 1142 # can provide dependency relationship
1143 1143 lastrevphid = None
1144 1144 for rev in revs:
1145 1145 ui.debug(b'sending rev %d\n' % rev)
1146 1146 ctx = repo[rev]
1147 1147
1148 1148 # Get Differential Revision ID
1149 1149 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1150 1150 if oldnode != ctx.node() or opts.get(b'amend'):
1151 1151 # Create or update Differential Revision
1152 1152 revision, diff = createdifferentialrevision(
1153 1153 ctx,
1154 1154 revid,
1155 1155 lastrevphid,
1156 1156 oldnode,
1157 1157 olddiff,
1158 1158 actions,
1159 1159 opts.get(b'comment'),
1160 1160 )
1161 1161 diffmap[ctx.node()] = diff
1162 1162 newrevid = int(revision[b'object'][b'id'])
1163 1163 newrevphid = revision[b'object'][b'phid']
1164 1164 if revid:
1165 1165 action = b'updated'
1166 1166 else:
1167 1167 action = b'created'
1168 1168
1169 1169 # Create a local tag to note the association, if commit message
1170 1170 # does not have it already
1171 1171 m = _differentialrevisiondescre.search(ctx.description())
1172 1172 if not m or int(m.group('id')) != newrevid:
1173 1173 tagname = b'D%d' % newrevid
1174 1174 tags.tag(
1175 1175 repo,
1176 1176 tagname,
1177 1177 ctx.node(),
1178 1178 message=None,
1179 1179 user=None,
1180 1180 date=None,
1181 1181 local=True,
1182 1182 )
1183 1183 else:
1184 1184 # Nothing changed. But still set "newrevphid" so the next revision
1185 1185 # could depend on this one and "newrevid" for the summary line.
1186 1186 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid']
1187 1187 newrevid = revid
1188 1188 action = b'skipped'
1189 1189
1190 1190 actiondesc = ui.label(
1191 1191 {
1192 1192 b'created': _(b'created'),
1193 1193 b'skipped': _(b'skipped'),
1194 1194 b'updated': _(b'updated'),
1195 1195 }[action],
1196 1196 b'phabricator.action.%s' % action,
1197 1197 )
1198 1198 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1199 1199 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1200 1200 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1201 1201 ui.write(
1202 1202 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1203 1203 )
1204 1204 drevids.append(newrevid)
1205 1205 lastrevphid = newrevphid
1206 1206
1207 1207 # Update commit messages and remove tags
1208 1208 if opts.get(b'amend'):
1209 1209 unfi = repo.unfiltered()
1210 1210 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1211 1211 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1212 1212 wnode = unfi[b'.'].node()
1213 1213 mapping = {} # {oldnode: [newnode]}
1214 1214 for i, rev in enumerate(revs):
1215 1215 old = unfi[rev]
1216 1216 drevid = drevids[i]
1217 1217 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1218 1218 newdesc = getdescfromdrev(drev)
1219 1219 # Make sure the commit message contains "Differential Revision"
1220 1220 if old.description() != newdesc:
1221 1221 if old.phase() == phases.public:
1222 1222 ui.warn(
1223 1223 _(b"warning: not updating public commit %s\n")
1224 1224 % scmutil.formatchangeid(old)
1225 1225 )
1226 1226 continue
1227 1227 parents = [
1228 1228 mapping.get(old.p1().node(), (old.p1(),))[0],
1229 1229 mapping.get(old.p2().node(), (old.p2(),))[0],
1230 1230 ]
1231 1231 new = context.metadataonlyctx(
1232 1232 repo,
1233 1233 old,
1234 1234 parents=parents,
1235 1235 text=newdesc,
1236 1236 user=old.user(),
1237 1237 date=old.date(),
1238 1238 extra=old.extra(),
1239 1239 )
1240 1240
1241 1241 newnode = new.commit()
1242 1242
1243 1243 mapping[old.node()] = [newnode]
1244 1244 # Update diff property
1245 1245 # If it fails just warn and keep going, otherwise the DREV
1246 1246 # associations will be lost
1247 1247 try:
1248 1248 writediffproperties(unfi[newnode], diffmap[old.node()])
1249 1249 except util.urlerr.urlerror:
1250 1250 ui.warnnoi18n(
1251 1251 b'Failed to update metadata for D%d\n' % drevid
1252 1252 )
1253 1253 # Remove the local tag since it's no longer necessary
1254 1254 tagname = b'D%d' % drevid
1255 1255 if tagname in repo.tags():
1256 1256 tags.tag(
1257 1257 repo,
1258 1258 tagname,
1259 1259 nullid,
1260 1260 message=None,
1261 1261 user=None,
1262 1262 date=None,
1263 1263 local=True,
1264 1264 )
1265 1265 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1266 1266 if wnode in mapping:
1267 1267 unfi.setparents(mapping[wnode][0])
1268 1268
1269 1269
1270 1270 # Map from "hg:meta" keys to header understood by "hg import". The order is
1271 1271 # consistent with "hg export" output.
1272 1272 _metanamemap = util.sortdict(
1273 1273 [
1274 1274 (b'user', b'User'),
1275 1275 (b'date', b'Date'),
1276 1276 (b'branch', b'Branch'),
1277 1277 (b'node', b'Node ID'),
1278 1278 (b'parent', b'Parent '),
1279 1279 ]
1280 1280 )
1281 1281
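For illustration, readpatch (defined below) renders these keys as ``hg import`` headers; the values here are borrowed from the getdiffmeta docstring, not from a real revision:

    # HG changeset patch
    # User Foo Bar <foo@example.com>
    # Date 1499571514 25200
    # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    # Parent  6d0abad76b30e4724a37ab8721d630394070fe16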
1282 1282
1283 1283 def _confirmbeforesend(repo, revs, oldmap):
1284 1284 url, token = readurltoken(repo.ui)
1285 1285 ui = repo.ui
1286 1286 for rev in revs:
1287 1287 ctx = repo[rev]
1288 1288 desc = ctx.description().splitlines()[0]
1289 1289 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1290 1290 if drevid:
1291 1291 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1292 1292 else:
1293 1293 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1294 1294
1295 1295 ui.write(
1296 1296 _(b'%s - %s: %s\n')
1297 1297 % (
1298 1298 drevdesc,
1299 1299 ui.label(bytes(ctx), b'phabricator.node'),
1300 1300 ui.label(desc, b'phabricator.desc'),
1301 1301 )
1302 1302 )
1303 1303
1304 1304 if ui.promptchoice(
1305 1305 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1306 1306 ):
1307 1307 return False
1308 1308
1309 1309 return True
1310 1310
1311 1311
1312 1312 _knownstatusnames = {
1313 1313 b'accepted',
1314 1314 b'needsreview',
1315 1315 b'needsrevision',
1316 1316 b'closed',
1317 1317 b'abandoned',
1318 1318 b'changesplanned',
1319 1319 }
1320 1320
1321 1321
1322 1322 def _getstatusname(drev):
1323 1323 """get normalized status name from a Differential Revision"""
1324 1324 return drev[b'statusName'].replace(b' ', b'').lower()
1325 1325
1326 1326
1327 1327 # Small language to specify differential revisions. Support symbols: (), :X,
1328 1328 # +, and -.
1329 1329
1330 1330 _elements = {
1331 1331 # token-type: binding-strength, primary, prefix, infix, suffix
1332 1332 b'(': (12, None, (b'group', 1, b')'), None, None),
1333 1333 b':': (8, None, (b'ancestors', 8), None, None),
1334 1334 b'&': (5, None, None, (b'and_', 5), None),
1335 1335 b'+': (4, None, None, (b'add', 4), None),
1336 1336 b'-': (4, None, None, (b'sub', 4), None),
1337 1337 b')': (0, None, None, None, None),
1338 1338 b'symbol': (0, b'symbol', None, None, None),
1339 1339 b'end': (0, None, None, None, None),
1340 1340 }
1341 1341
1342 1342
1343 1343 def _tokenize(text):
1344 1344 view = memoryview(text) # zero-copy slice
1345 1345 special = b'():+-& '
1346 1346 pos = 0
1347 1347 length = len(text)
1348 1348 while pos < length:
1349 1349 symbol = b''.join(
1350 1350 itertools.takewhile(
1351 1351 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1352 1352 )
1353 1353 )
1354 1354 if symbol:
1355 1355 yield (b'symbol', symbol, pos)
1356 1356 pos += len(symbol)
1357 1357 else: # special char, ignore space
1358 1358 if text[pos : pos + 1] != b' ':
1359 1359 yield (text[pos : pos + 1], None, pos)
1360 1360 pos += 1
1361 1361 yield (b'end', None, pos)
1362 1362
1363 1363
1364 1364 def _parse(text):
1365 1365 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1366 1366 if pos != len(text):
1367 1367 raise error.ParseError(b'invalid token', pos)
1368 1368 return tree
1369 1369
1370 1370
1371 1371 def _parsedrev(symbol):
1372 1372 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1373 1373 if symbol.startswith(b'D') and symbol[1:].isdigit():
1374 1374 return int(symbol[1:])
1375 1375 if symbol.isdigit():
1376 1376 return int(symbol)
1377 1377
1378 1378
1379 1379 def _prefetchdrevs(tree):
1380 1380 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1381 1381 drevs = set()
1382 1382 ancestordrevs = set()
1383 1383 op = tree[0]
1384 1384 if op == b'symbol':
1385 1385 r = _parsedrev(tree[1])
1386 1386 if r:
1387 1387 drevs.add(r)
1388 1388 elif op == b'ancestors':
1389 1389 r, a = _prefetchdrevs(tree[1])
1390 1390 drevs.update(r)
1391 1391 ancestordrevs.update(r)
1392 1392 ancestordrevs.update(a)
1393 1393 else:
1394 1394 for t in tree[1:]:
1395 1395 r, a = _prefetchdrevs(t)
1396 1396 drevs.update(r)
1397 1397 ancestordrevs.update(a)
1398 1398 return drevs, ancestordrevs
1399 1399
1400 1400
1401 1401 def querydrev(repo, spec):
1402 1402 """return a list of "Differential Revision" dicts
1403 1403
1404 1404 spec is a string using a simple query language, see docstring in phabread
1405 1405 for details.
1406 1406
1407 1407 A "Differential Revision dict" looks like:
1408 1408
1409 1409 {
1410 1410 "id": "2",
1411 1411 "phid": "PHID-DREV-672qvysjcczopag46qty",
1412 1412 "title": "example",
1413 1413 "uri": "https://phab.example.com/D2",
1414 1414 "dateCreated": "1499181406",
1415 1415 "dateModified": "1499182103",
1416 1416 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1417 1417 "status": "0",
1418 1418 "statusName": "Needs Review",
1419 1419 "properties": [],
1420 1420 "branch": null,
1421 1421 "summary": "",
1422 1422 "testPlan": "",
1423 1423 "lineCount": "2",
1424 1424 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1425 1425 "diffs": [
1426 1426 "3",
1427 1427 "4",
1428 1428 ],
1429 1429 "commits": [],
1430 1430 "reviewers": [],
1431 1431 "ccs": [],
1432 1432 "hashes": [],
1433 1433 "auxiliary": {
1434 1434 "phabricator:projects": [],
1435 1435 "phabricator:depends-on": [
1436 1436 "PHID-DREV-gbapp366kutjebt7agcd"
1437 1437 ]
1438 1438 },
1439 1439 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1440 1440 "sourcePath": null
1441 1441 }
1442 1442 """
1443 1443
1444 1444 def fetch(params):
1445 1445 """params -> single drev or None"""
1446 1446 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1447 1447 if key in prefetched:
1448 1448 return prefetched[key]
1449 1449 drevs = callconduit(repo.ui, b'differential.query', params)
1450 1450 # Fill prefetched with the result
1451 1451 for drev in drevs:
1452 1452 prefetched[drev[b'phid']] = drev
1453 1453 prefetched[int(drev[b'id'])] = drev
1454 1454 if key not in prefetched:
1455 1455 raise error.Abort(
1456 1456 _(b'cannot get Differential Revision %r') % params
1457 1457 )
1458 1458 return prefetched[key]
1459 1459
1460 1460 def getstack(topdrevids):
1461 1461 """given a top, get a stack from the bottom, [id] -> [id]"""
1462 1462 visited = set()
1463 1463 result = []
1464 1464 queue = [{b'ids': [i]} for i in topdrevids]
1465 1465 while queue:
1466 1466 params = queue.pop()
1467 1467 drev = fetch(params)
1468 1468 if drev[b'id'] in visited:
1469 1469 continue
1470 1470 visited.add(drev[b'id'])
1471 1471 result.append(int(drev[b'id']))
1472 1472 auxiliary = drev.get(b'auxiliary', {})
1473 1473 depends = auxiliary.get(b'phabricator:depends-on', [])
1474 1474 for phid in depends:
1475 1475 queue.append({b'phids': [phid]})
1476 1476 result.reverse()
1477 1477 return smartset.baseset(result)
1478 1478
1479 1479 # Initialize prefetch cache
1480 1480 prefetched = {} # {id or phid: drev}
1481 1481
1482 1482 tree = _parse(spec)
1483 1483 drevs, ancestordrevs = _prefetchdrevs(tree)
1484 1484
1485 1485 # developer config: phabricator.batchsize
1486 1486 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
1487 1487
1488 1488 # Prefetch Differential Revisions in batch
1489 1489 tofetch = set(drevs)
1490 1490 for r in ancestordrevs:
1491 1491 tofetch.update(range(max(1, r - batchsize), r + 1))
1492 1492 if drevs:
1493 1493 fetch({b'ids': list(tofetch)})
1494 1494 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1495 1495
1496 1496 # Walk through the tree, return smartsets
1497 1497 def walk(tree):
1498 1498 op = tree[0]
1499 1499 if op == b'symbol':
1500 1500 drev = _parsedrev(tree[1])
1501 1501 if drev:
1502 1502 return smartset.baseset([drev])
1503 1503 elif tree[1] in _knownstatusnames:
1504 1504 drevs = [
1505 1505 r
1506 1506 for r in validids
1507 1507 if _getstatusname(prefetched[r]) == tree[1]
1508 1508 ]
1509 1509 return smartset.baseset(drevs)
1510 1510 else:
1511 1511 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1512 1512 elif op in {b'and_', b'add', b'sub'}:
1513 1513 assert len(tree) == 3
1514 1514 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1515 1515 elif op == b'group':
1516 1516 return walk(tree[1])
1517 1517 elif op == b'ancestors':
1518 1518 return getstack(walk(tree[1]))
1519 1519 else:
1520 1520 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1521 1521
1522 1522 return [prefetched[r] for r in walk(tree)]
1523 1523
1524 1524
1525 1525 def getdescfromdrev(drev):
1526 1526 """get description (commit message) from "Differential Revision"
1527 1527
1528 1528 This is similar to the differential.getcommitmessage API. But we only care
1529 1529 about limited fields: title, summary, test plan, and URL.
1530 1530 """
1531 1531 title = drev[b'title']
1532 1532 summary = drev[b'summary'].rstrip()
1533 1533 testplan = drev[b'testPlan'].rstrip()
1534 1534 if testplan:
1535 1535 testplan = b'Test Plan:\n%s' % testplan
1536 1536 uri = b'Differential Revision: %s' % drev[b'uri']
1537 1537 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1538 1538
1539 1539
1540 1540 def getdiffmeta(diff):
1541 1541 """get commit metadata (date, node, user, p1) from a diff object
1542 1542
1543 1543 The metadata could be "hg:meta", sent by phabsend, like:
1544 1544
1545 1545 "properties": {
1546 1546 "hg:meta": {
1547 1547 "date": "1499571514 25200",
1548 1548 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1549 1549 "user": "Foo Bar <foo@example.com>",
1550 1550 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1551 1551 }
1552 1552 }
1553 1553
1554 1554 Or converted from "local:commits", sent by "arc", like:
1555 1555
1556 1556 "properties": {
1557 1557 "local:commits": {
1558 1558 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1559 1559 "author": "Foo Bar",
1560 1560 "time": 1499546314,
1561 1561 "branch": "default",
1562 1562 "tag": "",
1563 1563 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1564 1564 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1565 1565 "local": "1000",
1566 1566 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1567 1567 "summary": "...",
1568 1568 "message": "...",
1569 1569 "authorEmail": "foo@example.com"
1570 1570 }
1571 1571 }
1572 1572 }
1573 1573
1574 1574 Note: metadata extracted from "local:commits" will lose time zone
1575 1575 information.
1576 1576 """
1577 1577 props = diff.get(b'properties') or {}
1578 1578 meta = props.get(b'hg:meta')
1579 1579 if not meta:
1580 1580 if props.get(b'local:commits'):
1581 1581 commit = sorted(props[b'local:commits'].values())[0]
1582 1582 meta = {}
1583 1583 if b'author' in commit and b'authorEmail' in commit:
1584 1584 meta[b'user'] = b'%s <%s>' % (
1585 1585 commit[b'author'],
1586 1586 commit[b'authorEmail'],
1587 1587 )
1588 1588 if b'time' in commit:
1589 1589 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1590 1590 if b'branch' in commit:
1591 1591 meta[b'branch'] = commit[b'branch']
1592 1592 node = commit.get(b'commit', commit.get(b'rev'))
1593 1593 if node:
1594 1594 meta[b'node'] = node
1595 1595 if len(commit.get(b'parents', ())) >= 1:
1596 1596 meta[b'parent'] = commit[b'parents'][0]
1597 1597 else:
1598 1598 meta = {}
1599 1599 if b'date' not in meta and b'dateCreated' in diff:
1600 1600 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1601 1601 if b'branch' not in meta and diff.get(b'branch'):
1602 1602 meta[b'branch'] = diff[b'branch']
1603 1603 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1604 1604 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1605 1605 return meta
1606 1606
1607 1607
1608 1608 def readpatch(repo, drevs, write):
1609 1609 """generate plain-text patch readable by 'hg import'
1610 1610
1611 1611 write is usually ui.write. drevs is what "querydrev" returns, results of
1612 1612 "differential.query".
1613 1613 """
1614 1614 # Prefetch hg:meta property for all diffs
1615 1615 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
1616 1616 diffs = callconduit(repo.ui, b'differential.querydiffs', {b'ids': diffids})
1617 1617
1618 1618 # Generate patch for each drev
1619 1619 for drev in drevs:
1620 1620 repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
1621 1621
1622 1622 diffid = max(int(v) for v in drev[b'diffs'])
1623 1623 body = callconduit(
1624 1624 repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
1625 1625 )
1626 1626 desc = getdescfromdrev(drev)
1627 1627 header = b'# HG changeset patch\n'
1628 1628
1629 1629 # Try to preserve metadata from hg:meta property. Write hg patch
1630 1630 # headers that can be read by the "import" command. See patchheadermap
1631 1631 # and extract in mercurial/patch.py for supported headers.
1632 1632 meta = getdiffmeta(diffs[b'%d' % diffid])
1633 1633 for k in _metanamemap.keys():
1634 1634 if k in meta:
1635 1635 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1636 1636
1637 1637 content = b'%s%s\n%s' % (header, desc, body)
1638 1638 write(content)
1639 1639
1640 1640
1641 1641 @vcrcommand(
1642 1642 b'phabread',
1643 1643 [(b'', b'stack', False, _(b'read dependencies'))],
1644 1644 _(b'DREVSPEC [OPTIONS]'),
1645 1645 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1646 1646 )
1647 1647 def phabread(ui, repo, spec, **opts):
1648 1648 """print patches from Phabricator suitable for importing
1649 1649
1650 1650 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
1651 1651 the number ``123``. It could also have common operators like ``+``, ``-``,
1652 1652 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
1653 1653 select a stack.
1654 1654
1655 1655 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1656 1656 could be used to filter patches by status. For performance reasons, they
1657 1657 only represent a subset of non-status selections and cannot be used alone.
1658 1658
1659 1659 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
1660 1660 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1661 1661 stack up to D9.
1662 1662
1663 1663 If --stack is given, follow dependency information and read all patches.
1664 1664 It is equivalent to the ``:`` operator.
1665 1665 """
1666 1666 opts = pycompat.byteskwargs(opts)
1667 1667 if opts.get(b'stack'):
1668 1668 spec = b':(%s)' % spec
1669 1669 drevs = querydrev(repo, spec)
1670 1670 readpatch(repo, drevs, ui.write)
1671 1671
1672 1672
1673 1673 @vcrcommand(
1674 1674 b'phabupdate',
1675 1675 [
1676 1676 (b'', b'accept', False, _(b'accept revisions')),
1677 1677 (b'', b'reject', False, _(b'reject revisions')),
1678 1678 (b'', b'abandon', False, _(b'abandon revisions')),
1679 1679 (b'', b'reclaim', False, _(b'reclaim revisions')),
1680 1680 (b'm', b'comment', b'', _(b'comment on the last revision')),
1681 1681 ],
1682 1682 _(b'DREVSPEC [OPTIONS]'),
1683 1683 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1684 1684 )
1685 1685 def phabupdate(ui, repo, spec, **opts):
1686 1686 """update Differential Revisions in batch
1687 1687
1688 1688 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1689 1689 """
1690 1690 opts = pycompat.byteskwargs(opts)
1691 1691 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1692 1692 if len(flags) > 1:
1693 1693 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1694 1694
1695 1695 actions = []
1696 1696 for f in flags:
1697 1697 actions.append({b'type': f, b'value': True})
1698 1698
1699 1699 drevs = querydrev(repo, spec)
1700 1700 for i, drev in enumerate(drevs):
1701 1701 if i + 1 == len(drevs) and opts.get(b'comment'):
1702 1702 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1703 1703 if actions:
1704 1704 params = {
1705 1705 b'objectIdentifier': drev[b'phid'],
1706 1706 b'transactions': actions,
1707 1707 }
1708 1708 callconduit(ui, b'differential.revision.edit', params)
1709 1709
1710 1710
1711 1711 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1712 1712 def template_review(context, mapping):
1713 1713 """:phabreview: Object describing the review for this changeset.
1714 1714 Has attributes `url` and `id`.
1715 1715 """
1716 1716 ctx = context.resource(mapping, b'ctx')
1717 1717 m = _differentialrevisiondescre.search(ctx.description())
1718 1718 if m:
1719 1719 return templateutil.hybriddict(
1720 1720 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1721 1721 )
1722 1722 else:
1723 1723 tags = ctx.repo().nodetags(ctx.node())
1724 1724 for t in tags:
1725 1725 if _differentialrevisiontagre.match(t):
1726 1726 url = ctx.repo().ui.config(b'phabricator', b'url')
1727 1727 if not url.endswith(b'/'):
1728 1728 url += b'/'
1729 1729 url += t
1730 1730
1731 1731 return templateutil.hybriddict({b'url': url, b'id': t,})
1732 1732 return None
1733 1733
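A usage sketch for this keyword (hypothetical invocation, assuming template dot access on the returned dict):

    hg log -r . -T '{phabreview.url} {phabreview.id}\n'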
1734 1734
1735 1735 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1736 1736 def template_status(context, mapping):
1737 1737 """:phabstatus: String. Status of Phabricator differential.
1738 1738 """
1739 1739 ctx = context.resource(mapping, b'ctx')
1740 1740 repo = context.resource(mapping, b'repo')
1741 1741 ui = context.resource(mapping, b'ui')
1742 1742
1743 1743 rev = ctx.rev()
1744 1744 try:
1745 1745 drevid = getdrevmap(repo, [rev])[rev]
1746 1746 except KeyError:
1747 1747 return None
1748 1748 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1749 1749 for drev in drevs:
1750 1750 if int(drev[b'id']) == drevid:
1751 1751 return templateutil.hybriddict(
1752 1752 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1753 1753 )
1754 1754 return None
1755 1755
1756 1756
1757 1757 @show.showview(b'phabstatus', csettopic=b'work')
1758 1758 def phabstatusshowview(ui, repo, displayer):
1759 1759 """Phabricator differential status"""
1760 1760 revs = repo.revs('sort(_underway(), topo)')
1761 1761 drevmap = getdrevmap(repo, revs)
1762 1762 unknownrevs, drevids, revsbydrevid = [], set([]), {}
1763 1763 for rev, drevid in pycompat.iteritems(drevmap):
1764 1764 if drevid is not None:
1765 1765 drevids.add(drevid)
1766 1766 revsbydrevid.setdefault(drevid, set([])).add(rev)
1767 1767 else:
1768 1768 unknownrevs.append(rev)
1769 1769
1770 1770 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1771 1771 drevsbyrev = {}
1772 1772 for drev in drevs:
1773 1773 for rev in revsbydrevid[int(drev[b'id'])]:
1774 1774 drevsbyrev[rev] = drev
1775 1775
1776 1776 def phabstatus(ctx):
1777 1777 drev = drevsbyrev[ctx.rev()]
1778 1778 status = ui.label(
1779 1779 b'%(statusName)s' % drev,
1780 1780 b'phabricator.status.%s' % _getstatusname(drev),
1781 1781 )
1782 1782 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1783 1783
1784 1784 revs -= smartset.baseset(unknownrevs)
1785 1785 revdag = graphmod.dagwalker(repo, revs)
1786 1786
1787 1787 ui.setconfig(b'experimental', b'graphshorten', True)
1788 1788 displayer._exthook = phabstatus
1789 1789 nodelen = show.longestshortest(repo, revs)
1790 1790 logcmdutil.displaygraph(
1791 1791 ui,
1792 1792 repo,
1793 1793 revdag,
1794 1794 displayer,
1795 1795 graphmod.asciiedges,
1796 1796 props={b'nodelen': nodelen},
1797 1797 )
@@ -1,4185 +1,4190 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy as copymod
11 11 import errno
12 12 import os
13 13 import re
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 hex,
18 18 nullid,
19 19 nullrev,
20 20 short,
21 21 )
22 22 from .pycompat import (
23 23 getattr,
24 24 open,
25 25 setattr,
26 26 )
27 27 from .thirdparty import attr
28 28
29 29 from . import (
30 30 bookmarks,
31 31 changelog,
32 32 copies,
33 33 crecord as crecordmod,
34 34 dirstateguard,
35 35 encoding,
36 36 error,
37 37 formatter,
38 38 logcmdutil,
39 39 match as matchmod,
40 40 merge as mergemod,
41 41 mergeutil,
42 42 obsolete,
43 43 patch,
44 44 pathutil,
45 45 phases,
46 46 pycompat,
47 47 repair,
48 48 revlog,
49 49 rewriteutil,
50 50 scmutil,
51 51 smartset,
52 52 state as statemod,
53 53 subrepoutil,
54 54 templatekw,
55 55 templater,
56 56 util,
57 57 vfs as vfsmod,
58 58 )
59 59
60 60 from .utils import (
61 61 dateutil,
62 62 stringutil,
63 63 )
64 64
65 65 if pycompat.TYPE_CHECKING:
66 66 from typing import (
67 67 Any,
68 68 Dict,
69 69 )
70 70
71 71 for t in (Any, Dict):
72 72 assert t
73 73
74 74 stringio = util.stringio
75 75
76 76 # templates of common command options
77 77
78 78 dryrunopts = [
79 79 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
80 80 ]
81 81
82 82 confirmopts = [
83 83 (b'', b'confirm', None, _(b'ask before applying actions')),
84 84 ]
85 85
86 86 remoteopts = [
87 87 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
88 88 (
89 89 b'',
90 90 b'remotecmd',
91 91 b'',
92 92 _(b'specify hg command to run on the remote side'),
93 93 _(b'CMD'),
94 94 ),
95 95 (
96 96 b'',
97 97 b'insecure',
98 98 None,
99 99 _(b'do not verify server certificate (ignoring web.cacerts config)'),
100 100 ),
101 101 ]
102 102
103 103 walkopts = [
104 104 (
105 105 b'I',
106 106 b'include',
107 107 [],
108 108 _(b'include names matching the given patterns'),
109 109 _(b'PATTERN'),
110 110 ),
111 111 (
112 112 b'X',
113 113 b'exclude',
114 114 [],
115 115 _(b'exclude names matching the given patterns'),
116 116 _(b'PATTERN'),
117 117 ),
118 118 ]
119 119
120 120 commitopts = [
121 121 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
122 122 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
123 123 ]
124 124
125 125 commitopts2 = [
126 126 (
127 127 b'd',
128 128 b'date',
129 129 b'',
130 130 _(b'record the specified date as commit date'),
131 131 _(b'DATE'),
132 132 ),
133 133 (
134 134 b'u',
135 135 b'user',
136 136 b'',
137 137 _(b'record the specified user as committer'),
138 138 _(b'USER'),
139 139 ),
140 140 ]
141 141
142 142 commitopts3 = [
143 143 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
144 144 (b'U', b'currentuser', None, _(b'record the current user as committer')),
145 145 ]
146 146
147 147 formatteropts = [
148 148 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
149 149 ]
150 150
151 151 templateopts = [
152 152 (
153 153 b'',
154 154 b'style',
155 155 b'',
156 156 _(b'display using template map file (DEPRECATED)'),
157 157 _(b'STYLE'),
158 158 ),
159 159 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
160 160 ]
161 161
162 162 logopts = [
163 163 (b'p', b'patch', None, _(b'show patch')),
164 164 (b'g', b'git', None, _(b'use git extended diff format')),
165 165 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
166 166 (b'M', b'no-merges', None, _(b'do not show merges')),
167 167 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
168 168 (b'G', b'graph', None, _(b"show the revision DAG")),
169 169 ] + templateopts
170 170
171 171 diffopts = [
172 172 (b'a', b'text', None, _(b'treat all files as text')),
173 173 (
174 174 b'g',
175 175 b'git',
176 176 None,
177 177 _(b'use git extended diff format (DEFAULT: diff.git)'),
178 178 ),
179 179 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
180 180 (b'', b'nodates', None, _(b'omit dates from diff headers')),
181 181 ]
182 182
183 183 diffwsopts = [
184 184 (
185 185 b'w',
186 186 b'ignore-all-space',
187 187 None,
188 188 _(b'ignore white space when comparing lines'),
189 189 ),
190 190 (
191 191 b'b',
192 192 b'ignore-space-change',
193 193 None,
194 194 _(b'ignore changes in the amount of white space'),
195 195 ),
196 196 (
197 197 b'B',
198 198 b'ignore-blank-lines',
199 199 None,
200 200 _(b'ignore changes whose lines are all blank'),
201 201 ),
202 202 (
203 203 b'Z',
204 204 b'ignore-space-at-eol',
205 205 None,
206 206 _(b'ignore changes in whitespace at EOL'),
207 207 ),
208 208 ]
209 209
210 210 diffopts2 = (
211 211 [
212 212 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
213 213 (
214 214 b'p',
215 215 b'show-function',
216 216 None,
217 217 _(
218 218 b'show which function each change is in (DEFAULT: diff.showfunc)'
219 219 ),
220 220 ),
221 221 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
222 222 ]
223 223 + diffwsopts
224 224 + [
225 225 (
226 226 b'U',
227 227 b'unified',
228 228 b'',
229 229 _(b'number of lines of context to show'),
230 230 _(b'NUM'),
231 231 ),
232 232 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
233 233 (
234 234 b'',
235 235 b'root',
236 236 b'',
237 237 _(b'produce diffs relative to subdirectory'),
238 238 _(b'DIR'),
239 239 ),
240 240 ]
241 241 )
242 242
243 243 mergetoolopts = [
244 244 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
245 245 ]
246 246
247 247 similarityopts = [
248 248 (
249 249 b's',
250 250 b'similarity',
251 251 b'',
252 252 _(b'guess renamed files by similarity (0<=s<=100)'),
253 253 _(b'SIMILARITY'),
254 254 )
255 255 ]
256 256
257 257 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
258 258
259 259 debugrevlogopts = [
260 260 (b'c', b'changelog', False, _(b'open changelog')),
261 261 (b'm', b'manifest', False, _(b'open manifest')),
262 262 (b'', b'dir', b'', _(b'open directory manifest')),
263 263 ]
264 264
265 265 # special string such that everything below this line will be ignored in the
266 266 # editor text
267 267 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
268 268
269 269
270 270 def check_at_most_one_arg(opts, *args):
271 271 """abort if more than one of the arguments are in opts
272 272
273 273 Returns the unique argument or None if none of them were specified.
274 274 """
275 275
276 276 def to_display(name):
277 277 return pycompat.sysbytes(name).replace(b'_', b'-')
278 278
279 279 previous = None
280 280 for x in args:
281 281 if opts.get(x):
282 282 if previous:
283 283 raise error.Abort(
284 284 _(b'cannot specify both --%s and --%s')
285 285 % (to_display(previous), to_display(x))
286 286 )
287 287 previous = x
288 288 return previous
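# A minimal usage sketch with a hypothetical options dict (values are
# illustrative only, not part of the original change):
#
#   opts = {b'date': b'0 0', b'currentdate': None}
#   check_at_most_one_arg(opts, b'date', b'currentdate')   # returns b'date'
#
#   opts = {b'date': b'0 0', b'currentdate': True}
#   check_at_most_one_arg(opts, b'date', b'currentdate')
#   # raises error.Abort: cannot specify both --date and --currentdate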
289 289
290 290
291 291 def check_incompatible_arguments(opts, first, others):
292 292 """abort if the first argument is given along with any of the others
293 293
294 294 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
295 295 among themselves, and they're passed as a single collection.
296 296 """
297 297 for other in others:
298 298 check_at_most_one_arg(opts, first, other)
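# A hedged sketch of the calling convention, mirroring the call made in copy()
# further down in this module:
#
#   check_incompatible_arguments(opts, b'forget', [b'dry_run'])
#
# aborts when --forget is combined with --dry-run, while the options listed in
# `others` may still be combined with each other.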
299 299
300 300
301 301 def resolvecommitoptions(ui, opts):
302 302 """modify commit options dict to handle related options
303 303
304 304 The return value indicates whether ``rewrite.update-timestamp`` is the reason
305 305 the ``date`` option is set.
306 306 """
307 307 check_at_most_one_arg(opts, b'date', b'currentdate')
308 308 check_at_most_one_arg(opts, b'user', b'currentuser')
309 309
310 310 datemaydiffer = False # date-only change should be ignored?
311 311
312 312 if opts.get(b'currentdate'):
313 313 opts[b'date'] = b'%d %d' % dateutil.makedate()
314 314 elif (
315 315 not opts.get(b'date')
316 316 and ui.configbool(b'rewrite', b'update-timestamp')
317 317 and opts.get(b'currentdate') is None
318 318 ):
319 319 opts[b'date'] = b'%d %d' % dateutil.makedate()
320 320 datemaydiffer = True
321 321
322 322 if opts.get(b'currentuser'):
323 323 opts[b'user'] = ui.username()
324 324
325 325 return datemaydiffer
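# A minimal sketch of the expected call, with illustrative option values:
#
#   opts = {b'currentdate': True, b'currentuser': None}
#   datemaydiffer = resolvecommitoptions(ui, opts)
#   # opts[b'date'] is now filled in from dateutil.makedate(); datemaydiffer is
#   # True only when the date was injected because of the
#   # rewrite.update-timestamp config rather than an explicit option.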
326 326
327 327
328 328 def checknotesize(ui, opts):
329 329 """ make sure note is of valid format """
330 330
331 331 note = opts.get(b'note')
332 332 if not note:
333 333 return
334 334
335 335 if len(note) > 255:
336 336 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
337 337 if b'\n' in note:
338 338 raise error.Abort(_(b"note cannot contain a newline"))
339 339
340 340
341 341 def ishunk(x):
342 342 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
343 343 return isinstance(x, hunkclasses)
344 344
345 345
346 346 def newandmodified(chunks, originalchunks):
347 347 newlyaddedandmodifiedfiles = set()
348 348 alsorestore = set()
349 349 for chunk in chunks:
350 350 if (
351 351 ishunk(chunk)
352 352 and chunk.header.isnewfile()
353 353 and chunk not in originalchunks
354 354 ):
355 355 newlyaddedandmodifiedfiles.add(chunk.header.filename())
356 356 alsorestore.update(
357 357 set(chunk.header.files()) - {chunk.header.filename()}
358 358 )
359 359 return newlyaddedandmodifiedfiles, alsorestore
360 360
361 361
362 362 def parsealiases(cmd):
363 363 return cmd.split(b"|")
364 364
365 365
366 366 def setupwrapcolorwrite(ui):
367 367 # wrap ui.write so diff output can be labeled/colorized
368 368 def wrapwrite(orig, *args, **kw):
369 369 label = kw.pop('label', b'')
370 370 for chunk, l in patch.difflabel(lambda: args):
371 371 orig(chunk, label=label + l)
372 372
373 373 oldwrite = ui.write
374 374
375 375 def wrap(*args, **kwargs):
376 376 return wrapwrite(oldwrite, *args, **kwargs)
377 377
378 378 setattr(ui, 'write', wrap)
379 379 return oldwrite
380 380
381 381
382 382 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
383 383 try:
384 384 if usecurses:
385 385 if testfile:
386 386 recordfn = crecordmod.testdecorator(
387 387 testfile, crecordmod.testchunkselector
388 388 )
389 389 else:
390 390 recordfn = crecordmod.chunkselector
391 391
392 392 return crecordmod.filterpatch(
393 393 ui, originalhunks, recordfn, operation
394 394 )
395 395 except crecordmod.fallbackerror as e:
396 396 ui.warn(b'%s\n' % e)
397 397 ui.warn(_(b'falling back to text mode\n'))
398 398
399 399 return patch.filterpatch(ui, originalhunks, match, operation)
400 400
401 401
402 402 def recordfilter(ui, originalhunks, match, operation=None):
403 403 """ Prompts the user to filter the originalhunks and return a list of
404 404 selected hunks.
405 405 *operation* is used to build ui messages to indicate to the user what
406 406 kind of filtering they are doing: reverting, committing, shelving, etc.
407 407 (see patch.filterpatch).
408 408 """
409 409 usecurses = crecordmod.checkcurses(ui)
410 410 testfile = ui.config(b'experimental', b'crecordtest')
411 411 oldwrite = setupwrapcolorwrite(ui)
412 412 try:
413 413 newchunks, newopts = filterchunks(
414 414 ui, originalhunks, usecurses, testfile, match, operation
415 415 )
416 416 finally:
417 417 ui.write = oldwrite
418 418 return newchunks, newopts
419 419
420 420
421 421 def dorecord(
422 422 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
423 423 ):
424 424 opts = pycompat.byteskwargs(opts)
425 425 if not ui.interactive():
426 426 if cmdsuggest:
427 427 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
428 428 else:
429 429 msg = _(b'running non-interactively')
430 430 raise error.Abort(msg)
431 431
432 432 # make sure username is set before going interactive
433 433 if not opts.get(b'user'):
434 434 ui.username() # raise exception, username not provided
435 435
436 436 def recordfunc(ui, repo, message, match, opts):
437 437 """This is generic record driver.
438 438
439 439 Its job is to interactively filter local changes, and
440 440 accordingly prepare working directory into a state in which the
441 441 job can be delegated to a non-interactive commit command such as
442 442 'commit' or 'qrefresh'.
443 443
444 444 After the actual job is done by non-interactive command, the
445 445 working directory is restored to its original state.
446 446
447 447 In the end we'll record interesting changes, and everything else
448 448 will be left in place, so the user can continue working.
449 449 """
450 450 if not opts.get(b'interactive-unshelve'):
451 451 checkunfinished(repo, commit=True)
452 452 wctx = repo[None]
453 453 merge = len(wctx.parents()) > 1
454 454 if merge:
455 455 raise error.Abort(
456 456 _(
457 457 b'cannot partially commit a merge '
458 458 b'(use "hg commit" instead)'
459 459 )
460 460 )
461 461
462 462 def fail(f, msg):
463 463 raise error.Abort(b'%s: %s' % (f, msg))
464 464
465 465 force = opts.get(b'force')
466 466 if not force:
467 467 match = matchmod.badmatch(match, fail)
468 468
469 469 status = repo.status(match=match)
470 470
471 471 overrides = {(b'ui', b'commitsubrepos'): True}
472 472
473 473 with repo.ui.configoverride(overrides, b'record'):
474 474 # subrepoutil.precommit() modifies the status
475 475 tmpstatus = scmutil.status(
476 476 copymod.copy(status.modified),
477 477 copymod.copy(status.added),
478 478 copymod.copy(status.removed),
479 479 copymod.copy(status.deleted),
480 480 copymod.copy(status.unknown),
481 481 copymod.copy(status.ignored),
482 482 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
483 483 )
484 484
485 485 # Force allows -X subrepo to skip the subrepo.
486 486 subs, commitsubs, newstate = subrepoutil.precommit(
487 487 repo.ui, wctx, tmpstatus, match, force=True
488 488 )
489 489 for s in subs:
490 490 if s in commitsubs:
491 491 dirtyreason = wctx.sub(s).dirtyreason(True)
492 492 raise error.Abort(dirtyreason)
493 493
494 494 if not force:
495 495 repo.checkcommitpatterns(wctx, match, status, fail)
496 496 diffopts = patch.difffeatureopts(
497 497 ui,
498 498 opts=opts,
499 499 whitespace=True,
500 500 section=b'commands',
501 501 configprefix=b'commit.interactive.',
502 502 )
503 503 diffopts.nodates = True
504 504 diffopts.git = True
505 505 diffopts.showfunc = True
506 506 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
507 507 originalchunks = patch.parsepatch(originaldiff)
508 508 match = scmutil.match(repo[None], pats)
509 509
510 510 # 1. filter patch, since we are intending to apply subset of it
511 511 try:
512 512 chunks, newopts = filterfn(ui, originalchunks, match)
513 513 except error.PatchError as err:
514 514 raise error.Abort(_(b'error parsing patch: %s') % err)
515 515 opts.update(newopts)
516 516
517 517 # We need to keep a backup of files that have been newly added and
518 518 # modified during the recording process because there is a previous
519 519 # version without the edit in the workdir. We also will need to restore
520 520 # files that were the sources of renames so that the patch application
521 521 # works.
522 522 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
523 523 chunks, originalchunks
524 524 )
525 525 contenders = set()
526 526 for h in chunks:
527 527 try:
528 528 contenders.update(set(h.files()))
529 529 except AttributeError:
530 530 pass
531 531
532 532 changed = status.modified + status.added + status.removed
533 533 newfiles = [f for f in changed if f in contenders]
534 534 if not newfiles:
535 535 ui.status(_(b'no changes to record\n'))
536 536 return 0
537 537
538 538 modified = set(status.modified)
539 539
540 540 # 2. backup changed files, so we can restore them in the end
541 541
542 542 if backupall:
543 543 tobackup = changed
544 544 else:
545 545 tobackup = [
546 546 f
547 547 for f in newfiles
548 548 if f in modified or f in newlyaddedandmodifiedfiles
549 549 ]
550 550 backups = {}
551 551 if tobackup:
552 552 backupdir = repo.vfs.join(b'record-backups')
553 553 try:
554 554 os.mkdir(backupdir)
555 555 except OSError as err:
556 556 if err.errno != errno.EEXIST:
557 557 raise
558 558 try:
559 559 # backup continues
560 560 for f in tobackup:
561 561 fd, tmpname = pycompat.mkstemp(
562 562 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
563 563 )
564 564 os.close(fd)
565 565 ui.debug(b'backup %r as %r\n' % (f, tmpname))
566 566 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
567 567 backups[f] = tmpname
568 568
569 569 fp = stringio()
570 570 for c in chunks:
571 571 fname = c.filename()
572 572 if fname in backups:
573 573 c.write(fp)
574 574 dopatch = fp.tell()
575 575 fp.seek(0)
576 576
577 577 # 2.5 optionally review / modify patch in text editor
578 578 if opts.get(b'review', False):
579 579 patchtext = (
580 580 crecordmod.diffhelptext
581 581 + crecordmod.patchhelptext
582 582 + fp.read()
583 583 )
584 584 reviewedpatch = ui.edit(
585 585 patchtext, b"", action=b"diff", repopath=repo.path
586 586 )
587 587 fp.truncate(0)
588 588 fp.write(reviewedpatch)
589 589 fp.seek(0)
590 590
591 591 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
592 592 # 3a. apply filtered patch to clean repo (clean)
593 593 if backups:
594 594 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
595 595 mergemod.revert_to(repo[b'.'], matcher=m)
596 596
597 597 # 3b. (apply)
598 598 if dopatch:
599 599 try:
600 600 ui.debug(b'applying patch\n')
601 601 ui.debug(fp.getvalue())
602 602 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
603 603 except error.PatchError as err:
604 604 raise error.Abort(pycompat.bytestr(err))
605 605 del fp
606 606
607 607 # 4. We prepared working directory according to filtered
608 608 # patch. Now is the time to delegate the job to
609 609 # commit/qrefresh or the like!
610 610
611 611 # Make all of the pathnames absolute.
612 612 newfiles = [repo.wjoin(nf) for nf in newfiles]
613 613 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
614 614 finally:
615 615 # 5. finally restore backed-up files
616 616 try:
617 617 dirstate = repo.dirstate
618 618 for realname, tmpname in pycompat.iteritems(backups):
619 619 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
620 620
621 621 if dirstate[realname] == b'n':
622 622 # without normallookup, restoring timestamp
623 623 # may cause partially committed files
624 624 # to be treated as unmodified
625 625 dirstate.normallookup(realname)
626 626
627 627 # copystat=True here and above are a hack to trick any
628 628 # editors that have f open into thinking we haven't modified them.
629 629 #
630 630 # Also note that this is racy, as an editor could notice the
631 631 # file's mtime before we've finished writing it.
632 632 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
633 633 os.unlink(tmpname)
634 634 if tobackup:
635 635 os.rmdir(backupdir)
636 636 except OSError:
637 637 pass
638 638
639 639 def recordinwlock(ui, repo, message, match, opts):
640 640 with repo.wlock():
641 641 return recordfunc(ui, repo, message, match, opts)
642 642
643 643 return commit(ui, repo, recordinwlock, pats, opts)
644 644
645 645
646 646 class dirnode(object):
647 647 """
648 648 Represent a directory in user working copy with information required for
649 649 the purpose of tersing its status.
650 650
651 651 path is the path to the directory, without a trailing '/'
652 652
653 653 statuses is a set of statuses of all files in this directory (this includes
654 654 all the files in all the subdirectories too)
655 655
656 656 files is a list of files which are direct children of this directory
657 657
658 658 subdirs is a dictionary with the sub-directory name as the key and its own
659 659 dirnode object as the value
660 660 """
661 661
662 662 def __init__(self, dirpath):
663 663 self.path = dirpath
664 664 self.statuses = set()
665 665 self.files = []
666 666 self.subdirs = {}
667 667
668 668 def _addfileindir(self, filename, status):
669 669 """Add a file in this directory as a direct child."""
670 670 self.files.append((filename, status))
671 671
672 672 def addfile(self, filename, status):
673 673 """
674 674 Add a file to this directory or to its direct parent directory.
675 675
676 676 If the file is not a direct child of this directory, we traverse to the
677 677 directory of which this file is a direct child and add the file
678 678 there.
679 679 """
680 680
681 681 # if the filename contains a path separator, it is not a direct
682 682 # child of this directory
683 683 if b'/' in filename:
684 684 subdir, filep = filename.split(b'/', 1)
685 685
686 686 # does the dirnode object for subdir exist?
687 687 if subdir not in self.subdirs:
688 688 subdirpath = pathutil.join(self.path, subdir)
689 689 self.subdirs[subdir] = dirnode(subdirpath)
690 690
691 691 # try adding the file in subdir
692 692 self.subdirs[subdir].addfile(filep, status)
693 693
694 694 else:
695 695 self._addfileindir(filename, status)
696 696
697 697 if status not in self.statuses:
698 698 self.statuses.add(status)
699 699
700 700 def iterfilepaths(self):
701 701 """Yield (status, path) for files directly under this directory."""
702 702 for f, st in self.files:
703 703 yield st, pathutil.join(self.path, f)
704 704
705 705 def tersewalk(self, terseargs):
706 706 """
707 707 Yield (status, path) obtained by processing the status of this
708 708 dirnode.
709 709
710 710 terseargs is the string of arguments passed by the user with the `--terse`
711 711 flag.
712 712
713 713 Following are the cases which can happen:
714 714
715 715 1) All the files in the directory (including all the files in its
716 716 subdirectories) share the same status and the user has asked us to terse
717 717 that status. -> yield (status, dirpath). dirpath will end in '/'.
718 718
719 719 2) Otherwise, we do the following:
720 720
721 721 a) Yield (status, filepath) for all the files which are in this
722 722 directory (only the ones in this directory, not the subdirs)
723 723
724 724 b) Recurse the function on all the subdirectories of this
725 725 directory
726 726 """
727 727
728 728 if len(self.statuses) == 1:
729 729 onlyst = self.statuses.pop()
730 730
731 731 # Making sure we terse only when the status abbreviation is
732 732 # passed as terse argument
733 733 if onlyst in terseargs:
734 734 yield onlyst, self.path + b'/'
735 735 return
736 736
737 737 # add the files to status list
738 738 for st, fpath in self.iterfilepaths():
739 739 yield st, fpath
740 740
741 741 # recurse on the subdirs
742 742 for dirobj in self.subdirs.values():
743 743 for st, fpath in dirobj.tersewalk(terseargs):
744 744 yield st, fpath
745 745
746 746
747 747 def tersedir(statuslist, terseargs):
748 748 """
749 749 Terse the status if all the files in a directory share the same status.
750 750
751 751 statuslist is a scmutil.status() object which contains a list of files for
752 752 each status.
753 753 terseargs is the string which is passed by the user as the argument to the
754 754 `--terse` flag.
755 755
756 756 The function makes a tree of objects of dirnode class, and at each node it
757 757 stores the information required to know whether we can terse a certain
758 758 directory or not.
759 759 """
760 760 # the order matters here as that is used to produce final list
761 761 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
762 762
763 763 # checking the argument validity
764 764 for s in pycompat.bytestr(terseargs):
765 765 if s not in allst:
766 766 raise error.Abort(_(b"'%s' not recognized") % s)
767 767
768 768 # creating a dirnode object for the root of the repo
769 769 rootobj = dirnode(b'')
770 770 pstatus = (
771 771 b'modified',
772 772 b'added',
773 773 b'deleted',
774 774 b'clean',
775 775 b'unknown',
776 776 b'ignored',
777 777 b'removed',
778 778 )
779 779
780 780 tersedict = {}
781 781 for attrname in pstatus:
782 782 statuschar = attrname[0:1]
783 783 for f in getattr(statuslist, attrname):
784 784 rootobj.addfile(f, statuschar)
785 785 tersedict[statuschar] = []
786 786
787 787 # we won't be tersing the root dir, so add files in it
788 788 for st, fpath in rootobj.iterfilepaths():
789 789 tersedict[st].append(fpath)
790 790
791 791 # process each sub-directory and build tersedict
792 792 for subdir in rootobj.subdirs.values():
793 793 for st, f in subdir.tersewalk(terseargs):
794 794 tersedict[st].append(f)
795 795
796 796 tersedlist = []
797 797 for st in allst:
798 798 tersedict[st].sort()
799 799 tersedlist.append(tersedict[st])
800 800
801 801 return scmutil.status(*tersedlist)
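# An illustrative example (paths are made up): if b'build/a' and b'build/b' are
# the only files under build/ and both are unknown, then with terseargs
# containing b'u' the returned status object lists the single entry b'build/'
# as unknown instead of the two individual files.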
802 802
803 803
804 804 def _commentlines(raw):
805 805 '''Surround lines with a comment char and a new line'''
806 806 lines = raw.splitlines()
807 807 commentedlines = [b'# %s' % line for line in lines]
808 808 return b'\n'.join(commentedlines) + b'\n'
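# For example (illustrative input): _commentlines(b'foo\nbar') returns
# b'# foo\n# bar\n'.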
809 809
810 810
811 811 @attr.s(frozen=True)
812 812 class morestatus(object):
813 813 reporoot = attr.ib()
814 814 unfinishedop = attr.ib()
815 815 unfinishedmsg = attr.ib()
816 816 activemerge = attr.ib()
817 817 unresolvedpaths = attr.ib()
818 818 _formattedpaths = attr.ib(init=False, default=set())
819 819 _label = b'status.morestatus'
820 820
821 821 def formatfile(self, path, fm):
822 822 self._formattedpaths.add(path)
823 823 if self.activemerge and path in self.unresolvedpaths:
824 824 fm.data(unresolved=True)
825 825
826 826 def formatfooter(self, fm):
827 827 if self.unfinishedop or self.unfinishedmsg:
828 828 fm.startitem()
829 829 fm.data(itemtype=b'morestatus')
830 830
831 831 if self.unfinishedop:
832 832 fm.data(unfinished=self.unfinishedop)
833 833 statemsg = (
834 834 _(b'The repository is in an unfinished *%s* state.')
835 835 % self.unfinishedop
836 836 )
837 837 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
838 838 if self.unfinishedmsg:
839 839 fm.data(unfinishedmsg=self.unfinishedmsg)
840 840
841 841 # May also start new data items.
842 842 self._formatconflicts(fm)
843 843
844 844 if self.unfinishedmsg:
845 845 fm.plain(
846 846 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
847 847 )
848 848
849 849 def _formatconflicts(self, fm):
850 850 if not self.activemerge:
851 851 return
852 852
853 853 if self.unresolvedpaths:
854 854 mergeliststr = b'\n'.join(
855 855 [
856 856 b' %s'
857 857 % util.pathto(self.reporoot, encoding.getcwd(), path)
858 858 for path in self.unresolvedpaths
859 859 ]
860 860 )
861 861 msg = (
862 862 _(
863 863 '''Unresolved merge conflicts:
864 864
865 865 %s
866 866
867 867 To mark files as resolved: hg resolve --mark FILE'''
868 868 )
869 869 % mergeliststr
870 870 )
871 871
872 872 # If any paths with unresolved conflicts were not previously
873 873 # formatted, output them now.
874 874 for f in self.unresolvedpaths:
875 875 if f in self._formattedpaths:
876 876 # Already output.
877 877 continue
878 878 fm.startitem()
879 879 # We can't claim to know the status of the file - it may just
880 880 # have been in one of the states that were not requested for
881 881 # display, so it could be anything.
882 882 fm.data(itemtype=b'file', path=f, unresolved=True)
883 883
884 884 else:
885 885 msg = _(b'No unresolved merge conflicts.')
886 886
887 887 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
888 888
889 889
890 890 def readmorestatus(repo):
891 891 """Returns a morestatus object if the repo has unfinished state."""
892 892 statetuple = statemod.getrepostate(repo)
893 893 mergestate = mergemod.mergestate.read(repo)
894 894 activemerge = mergestate.active()
895 895 if not statetuple and not activemerge:
896 896 return None
897 897
898 898 unfinishedop = unfinishedmsg = unresolved = None
899 899 if statetuple:
900 900 unfinishedop, unfinishedmsg = statetuple
901 901 if activemerge:
902 902 unresolved = sorted(mergestate.unresolved())
903 903 return morestatus(
904 904 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
905 905 )
906 906
907 907
908 908 def findpossible(cmd, table, strict=False):
909 909 """
910 910 Return cmd -> (aliases, command table entry)
911 911 for each matching command.
912 912 Return debug commands (or their aliases) only if no normal command matches.
913 913 """
914 914 choice = {}
915 915 debugchoice = {}
916 916
917 917 if cmd in table:
918 918 # short-circuit exact matches, "log" alias beats "log|history"
919 919 keys = [cmd]
920 920 else:
921 921 keys = table.keys()
922 922
923 923 allcmds = []
924 924 for e in keys:
925 925 aliases = parsealiases(e)
926 926 allcmds.extend(aliases)
927 927 found = None
928 928 if cmd in aliases:
929 929 found = cmd
930 930 elif not strict:
931 931 for a in aliases:
932 932 if a.startswith(cmd):
933 933 found = a
934 934 break
935 935 if found is not None:
936 936 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
937 937 debugchoice[found] = (aliases, table[e])
938 938 else:
939 939 choice[found] = (aliases, table[e])
940 940
941 941 if not choice and debugchoice:
942 942 choice = debugchoice
943 943
944 944 return choice, allcmds
945 945
946 946
947 947 def findcmd(cmd, table, strict=True):
948 948 """Return (aliases, command table entry) for command string."""
949 949 choice, allcmds = findpossible(cmd, table, strict)
950 950
951 951 if cmd in choice:
952 952 return choice[cmd]
953 953
954 954 if len(choice) > 1:
955 955 clist = sorted(choice)
956 956 raise error.AmbiguousCommand(cmd, clist)
957 957
958 958 if choice:
959 959 return list(choice.values())[0]
960 960
961 961 raise error.UnknownCommand(cmd, allcmds)
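# A hedged sketch of the matching rules, assuming a command table whose keys
# include b'log|history' and b'locate' (table contents are illustrative only):
#
#   findcmd(b'log', table)                 # exact alias match -> the log entry
#   findcmd(b'lo', table, strict=False)    # prefix matches both entries,
#                                          # so error.AmbiguousCommand is raised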
962 962
963 963
964 964 def changebranch(ui, repo, revs, label):
965 965 """ Change the branch name of given revs to label """
966 966
967 967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
968 968 # abort in case of uncommitted merge or dirty wdir
969 969 bailifchanged(repo)
970 970 revs = scmutil.revrange(repo, revs)
971 971 if not revs:
972 972 raise error.Abort(b"empty revision set")
973 973 roots = repo.revs(b'roots(%ld)', revs)
974 974 if len(roots) > 1:
975 975 raise error.Abort(
976 976 _(b"cannot change branch of non-linear revisions")
977 977 )
978 978 rewriteutil.precheck(repo, revs, b'change branch of')
979 979
980 980 root = repo[roots.first()]
981 981 rpb = {parent.branch() for parent in root.parents()}
982 982 if label not in rpb and label in repo.branchmap():
983 983 raise error.Abort(_(b"a branch of the same name already exists"))
984 984
985 985 if repo.revs(b'obsolete() and %ld', revs):
986 986 raise error.Abort(
987 987 _(b"cannot change branch of a obsolete changeset")
988 988 )
989 989
990 990 # make sure only topological heads
991 991 if repo.revs(b'heads(%ld) - head()', revs):
992 992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
993 993
994 994 replacements = {}
995 995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
996 996 # mercurial.subrepo -> mercurial.cmdutil
997 997 from . import context
998 998
999 999 for rev in revs:
1000 1000 ctx = repo[rev]
1001 1001 oldbranch = ctx.branch()
1002 1002 # check if ctx has same branch
1003 1003 if oldbranch == label:
1004 1004 continue
1005 1005
1006 1006 def filectxfn(repo, newctx, path):
1007 1007 try:
1008 1008 return ctx[path]
1009 1009 except error.ManifestLookupError:
1010 1010 return None
1011 1011
1012 1012 ui.debug(
1013 1013 b"changing branch of '%s' from '%s' to '%s'\n"
1014 1014 % (hex(ctx.node()), oldbranch, label)
1015 1015 )
1016 1016 extra = ctx.extra()
1017 1017 extra[b'branch_change'] = hex(ctx.node())
1018 1018 # While changing the branch of a set of linear commits, make sure that
1019 1019 # we base our commits on the new parent rather than the old parent which
1020 1020 # was obsoleted while changing the branch
1021 1021 p1 = ctx.p1().node()
1022 1022 p2 = ctx.p2().node()
1023 1023 if p1 in replacements:
1024 1024 p1 = replacements[p1][0]
1025 1025 if p2 in replacements:
1026 1026 p2 = replacements[p2][0]
1027 1027
1028 1028 mc = context.memctx(
1029 1029 repo,
1030 1030 (p1, p2),
1031 1031 ctx.description(),
1032 1032 ctx.files(),
1033 1033 filectxfn,
1034 1034 user=ctx.user(),
1035 1035 date=ctx.date(),
1036 1036 extra=extra,
1037 1037 branch=label,
1038 1038 )
1039 1039
1040 1040 newnode = repo.commitctx(mc)
1041 1041 replacements[ctx.node()] = (newnode,)
1042 1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1043 1043
1044 1044 # create obsmarkers and move bookmarks
1045 1045 scmutil.cleanupnodes(
1046 1046 repo, replacements, b'branch-change', fixphase=True
1047 1047 )
1048 1048
1049 1049 # move the working copy too
1050 1050 wctx = repo[None]
1051 1051 # in-progress merge is a bit too complex for now.
1052 1052 if len(wctx.parents()) == 1:
1053 1053 newid = replacements.get(wctx.p1().node())
1054 1054 if newid is not None:
1055 1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1056 1056 # mercurial.cmdutil
1057 1057 from . import hg
1058 1058
1059 1059 hg.update(repo, newid[0], quietempty=True)
1060 1060
1061 1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1062 1062
1063 1063
1064 1064 def findrepo(p):
1065 1065 while not os.path.isdir(os.path.join(p, b".hg")):
1066 1066 oldp, p = p, os.path.dirname(p)
1067 1067 if p == oldp:
1068 1068 return None
1069 1069
1070 1070 return p
1071 1071
1072 1072
1073 1073 def bailifchanged(repo, merge=True, hint=None):
1074 1074 """ enforce the precondition that working directory must be clean.
1075 1075
1076 1076 'merge' can be set to false if a pending uncommitted merge should be
1077 1077 ignored (such as when 'update --check' runs).
1078 1078
1079 1079 'hint' is the usual hint given to Abort exception.
1080 1080 """
1081 1081
1082 1082 if merge and repo.dirstate.p2() != nullid:
1083 1083 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1084 1084 st = repo.status()
1085 1085 if st.modified or st.added or st.removed or st.deleted:
1086 1086 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1087 1087 ctx = repo[None]
1088 1088 for s in sorted(ctx.substate):
1089 1089 ctx.sub(s).bailifchanged(hint=hint)
1090 1090
1091 1091
1092 1092 def logmessage(ui, opts):
1093 1093 """ get the log message according to -m and -l option """
1094 1094
1095 1095 check_at_most_one_arg(opts, b'message', b'logfile')
1096 1096
1097 1097 message = opts.get(b'message')
1098 1098 logfile = opts.get(b'logfile')
1099 1099
1100 1100 if not message and logfile:
1101 1101 try:
1102 1102 if isstdiofilename(logfile):
1103 1103 message = ui.fin.read()
1104 1104 else:
1105 1105 message = b'\n'.join(util.readfile(logfile).splitlines())
1106 1106 except IOError as inst:
1107 1107 raise error.Abort(
1108 1108 _(b"can't read commit message '%s': %s")
1109 1109 % (logfile, encoding.strtolocal(inst.strerror))
1110 1110 )
1111 1111 return message
1112 1112
1113 1113
1114 1114 def mergeeditform(ctxorbool, baseformname):
1115 1115 """return appropriate editform name (referencing a committemplate)
1116 1116
1117 1117 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1118 1118 a merge is being committed.
1119 1119
1120 1120 This returns baseformname with '.merge' appended if it is a merge,
1121 1121 otherwise '.normal' is appended.
1122 1122 """
1123 1123 if isinstance(ctxorbool, bool):
1124 1124 if ctxorbool:
1125 1125 return baseformname + b".merge"
1126 1126 elif len(ctxorbool.parents()) > 1:
1127 1127 return baseformname + b".merge"
1128 1128
1129 1129 return baseformname + b".normal"
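# Illustrative values: mergeeditform(True, b'import.normal') evaluates to
# b'import.normal.merge', while a ctx with a single parent yields
# b'import.normal.normal'; the result is used to look up a committemplate
# entry.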
1130 1130
1131 1131
1132 1132 def getcommiteditor(
1133 1133 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1134 1134 ):
1135 1135 """get appropriate commit message editor according to '--edit' option
1136 1136
1137 1137 'finishdesc' is a function to be called with the edited commit message
1138 1138 (= 'description' of the new changeset) just after editing, but
1139 1139 before checking emptiness. It should return the actual text to be
1140 1140 stored into history. This allows changing the description before
1141 1141 it is stored.
1142 1142
1143 1143 'extramsg' is an extra message to be shown in the editor instead of
1144 1144 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1145 1145 are automatically added.
1146 1146
1147 1147 'editform' is a dot-separated list of names, to distinguish
1148 1148 the purpose of commit text editing.
1149 1149
1150 1150 'getcommiteditor' returns 'commitforceeditor' regardless of
1151 1151 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1152 1152 they are specific to usage in MQ.
1153 1153 """
1154 1154 if edit or finishdesc or extramsg:
1155 1155 return lambda r, c, s: commitforceeditor(
1156 1156 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1157 1157 )
1158 1158 elif editform:
1159 1159 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1160 1160 else:
1161 1161 return commiteditor
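# A hedged usage sketch, mirroring the call in tryimportone() below:
#
#   editor = getcommiteditor(editform=editform, **pycompat.strkwargs(opts))
#   repo.commit(message, user, date, editor=editor)
#
# When neither --edit nor finishdesc/extramsg is given, the returned callable
# simply wraps commiteditor for the requested editform.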
1162 1162
1163 1163
1164 1164 def _escapecommandtemplate(tmpl):
1165 1165 parts = []
1166 1166 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1167 1167 if typ == b'string':
1168 1168 parts.append(stringutil.escapestr(tmpl[start:end]))
1169 1169 else:
1170 1170 parts.append(tmpl[start:end])
1171 1171 return b''.join(parts)
1172 1172
1173 1173
1174 1174 def rendercommandtemplate(ui, tmpl, props):
1175 1175 r"""Expand a literal template 'tmpl' in a way suitable for command line
1176 1176
1177 1177 '\' in outermost string is not taken as an escape character because it
1178 1178 is a directory separator on Windows.
1179 1179
1180 1180 >>> from . import ui as uimod
1181 1181 >>> ui = uimod.ui()
1182 1182 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1183 1183 'c:\\foo'
1184 1184 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1185 1185 'c:{path}'
1186 1186 """
1187 1187 if not tmpl:
1188 1188 return tmpl
1189 1189 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1190 1190 return t.renderdefault(props)
1191 1191
1192 1192
1193 1193 def rendertemplate(ctx, tmpl, props=None):
1194 1194 """Expand a literal template 'tmpl' byte-string against one changeset
1195 1195
1196 1196 Each props item must be a stringify-able value or a callable returning
1197 1197 such a value, i.e. no bare list or dict should be passed.
1198 1198 """
1199 1199 repo = ctx.repo()
1200 1200 tres = formatter.templateresources(repo.ui, repo)
1201 1201 t = formatter.maketemplater(
1202 1202 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1203 1203 )
1204 1204 mapping = {b'ctx': ctx}
1205 1205 if props:
1206 1206 mapping.update(props)
1207 1207 return t.renderdefault(mapping)
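# An illustrative call (template text is made up): rendertemplate(ctx,
# b'{rev}:{node|short}\n') expands the template against the given changeset;
# additional names can be supplied via props, e.g. props={b'index': b'1'}.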
1208 1208
1209 1209
1210 1210 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1211 1211 r"""Convert old-style filename format string to template string
1212 1212
1213 1213 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1214 1214 'foo-{reporoot|basename}-{seqno}.patch'
1215 1215 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1216 1216 '{rev}{tags % "{tag}"}{node}'
1217 1217
1218 1218 '\' in outermost strings has to be escaped because it is a directory
1219 1219 separator on Windows:
1220 1220
1221 1221 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1222 1222 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1223 1223 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1224 1224 '\\\\\\\\foo\\\\bar.patch'
1225 1225 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1226 1226 '\\\\{tags % "{tag}"}'
1227 1227
1228 1228 but inner strings follow the template rules (i.e. '\' is taken as an
1229 1229 escape character):
1230 1230
1231 1231 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1232 1232 '{"c:\\tmp"}'
1233 1233 """
1234 1234 expander = {
1235 1235 b'H': b'{node}',
1236 1236 b'R': b'{rev}',
1237 1237 b'h': b'{node|short}',
1238 1238 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1239 1239 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1240 1240 b'%': b'%',
1241 1241 b'b': b'{reporoot|basename}',
1242 1242 }
1243 1243 if total is not None:
1244 1244 expander[b'N'] = b'{total}'
1245 1245 if seqno is not None:
1246 1246 expander[b'n'] = b'{seqno}'
1247 1247 if total is not None and seqno is not None:
1248 1248 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1249 1249 if pathname is not None:
1250 1250 expander[b's'] = b'{pathname|basename}'
1251 1251 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1252 1252 expander[b'p'] = b'{pathname}'
1253 1253
1254 1254 newname = []
1255 1255 for typ, start, end in templater.scantemplate(pat, raw=True):
1256 1256 if typ != b'string':
1257 1257 newname.append(pat[start:end])
1258 1258 continue
1259 1259 i = start
1260 1260 while i < end:
1261 1261 n = pat.find(b'%', i, end)
1262 1262 if n < 0:
1263 1263 newname.append(stringutil.escapestr(pat[i:end]))
1264 1264 break
1265 1265 newname.append(stringutil.escapestr(pat[i:n]))
1266 1266 if n + 2 > end:
1267 1267 raise error.Abort(
1268 1268 _(b"incomplete format spec in output filename")
1269 1269 )
1270 1270 c = pat[n + 1 : n + 2]
1271 1271 i = n + 2
1272 1272 try:
1273 1273 newname.append(expander[c])
1274 1274 except KeyError:
1275 1275 raise error.Abort(
1276 1276 _(b"invalid format spec '%%%s' in output filename") % c
1277 1277 )
1278 1278 return b''.join(newname)
1279 1279
1280 1280
1281 1281 def makefilename(ctx, pat, **props):
1282 1282 if not pat:
1283 1283 return pat
1284 1284 tmpl = _buildfntemplate(pat, **props)
1285 1285 # BUG: alias expansion shouldn't be made against template fragments
1286 1286 # rewritten from %-format strings, but we have no easy way to partially
1287 1287 # disable the expansion.
1288 1288 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1289 1289
1290 1290
1291 1291 def isstdiofilename(pat):
1292 1292 """True if the given pat looks like a filename denoting stdin/stdout"""
1293 1293 return not pat or pat == b'-'
1294 1294
1295 1295
1296 1296 class _unclosablefile(object):
1297 1297 def __init__(self, fp):
1298 1298 self._fp = fp
1299 1299
1300 1300 def close(self):
1301 1301 pass
1302 1302
1303 1303 def __iter__(self):
1304 1304 return iter(self._fp)
1305 1305
1306 1306 def __getattr__(self, attr):
1307 1307 return getattr(self._fp, attr)
1308 1308
1309 1309 def __enter__(self):
1310 1310 return self
1311 1311
1312 1312 def __exit__(self, exc_type, exc_value, exc_tb):
1313 1313 pass
1314 1314
1315 1315
1316 1316 def makefileobj(ctx, pat, mode=b'wb', **props):
1317 1317 writable = mode not in (b'r', b'rb')
1318 1318
1319 1319 if isstdiofilename(pat):
1320 1320 repo = ctx.repo()
1321 1321 if writable:
1322 1322 fp = repo.ui.fout
1323 1323 else:
1324 1324 fp = repo.ui.fin
1325 1325 return _unclosablefile(fp)
1326 1326 fn = makefilename(ctx, pat, **props)
1327 1327 return open(fn, mode)
1328 1328
1329 1329
1330 1330 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1331 1331 """opens the changelog, manifest, a filelog or a given revlog"""
1332 1332 cl = opts[b'changelog']
1333 1333 mf = opts[b'manifest']
1334 1334 dir = opts[b'dir']
1335 1335 msg = None
1336 1336 if cl and mf:
1337 1337 msg = _(b'cannot specify --changelog and --manifest at the same time')
1338 1338 elif cl and dir:
1339 1339 msg = _(b'cannot specify --changelog and --dir at the same time')
1340 1340 elif cl or mf or dir:
1341 1341 if file_:
1342 1342 msg = _(b'cannot specify filename with --changelog or --manifest')
1343 1343 elif not repo:
1344 1344 msg = _(
1345 1345 b'cannot specify --changelog or --manifest or --dir '
1346 1346 b'without a repository'
1347 1347 )
1348 1348 if msg:
1349 1349 raise error.Abort(msg)
1350 1350
1351 1351 r = None
1352 1352 if repo:
1353 1353 if cl:
1354 1354 r = repo.unfiltered().changelog
1355 1355 elif dir:
1356 1356 if b'treemanifest' not in repo.requirements:
1357 1357 raise error.Abort(
1358 1358 _(
1359 1359 b"--dir can only be used on repos with "
1360 1360 b"treemanifest enabled"
1361 1361 )
1362 1362 )
1363 1363 if not dir.endswith(b'/'):
1364 1364 dir = dir + b'/'
1365 1365 dirlog = repo.manifestlog.getstorage(dir)
1366 1366 if len(dirlog):
1367 1367 r = dirlog
1368 1368 elif mf:
1369 1369 r = repo.manifestlog.getstorage(b'')
1370 1370 elif file_:
1371 1371 filelog = repo.file(file_)
1372 1372 if len(filelog):
1373 1373 r = filelog
1374 1374
1375 1375 # Not all storage may be revlogs. If requested, try to return an actual
1376 1376 # revlog instance.
1377 1377 if returnrevlog:
1378 1378 if isinstance(r, revlog.revlog):
1379 1379 pass
1380 1380 elif util.safehasattr(r, b'_revlog'):
1381 1381 r = r._revlog # pytype: disable=attribute-error
1382 1382 elif r is not None:
1383 1383 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1384 1384
1385 1385 if not r:
1386 1386 if not returnrevlog:
1387 1387 raise error.Abort(_(b'cannot give path to non-revlog'))
1388 1388
1389 1389 if not file_:
1390 1390 raise error.CommandError(cmd, _(b'invalid arguments'))
1391 1391 if not os.path.isfile(file_):
1392 1392 raise error.Abort(_(b"revlog '%s' not found") % file_)
1393 1393 r = revlog.revlog(
1394 1394 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1395 1395 )
1396 1396 return r
1397 1397
1398 1398
1399 1399 def openrevlog(repo, cmd, file_, opts):
1400 1400 """Obtain a revlog backing storage of an item.
1401 1401
1402 1402 This is similar to ``openstorage()`` except it always returns a revlog.
1403 1403
1404 1404 In most cases, a caller cares about the main storage object - not the
1405 1405 revlog backing it. Therefore, this function should only be used by code
1406 1406 that needs to examine low-level revlog implementation details, e.g. debug
1407 1407 commands.
1408 1408 """
1409 1409 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1410 1410
1411 1411
1412 1412 def copy(ui, repo, pats, opts, rename=False):
1413 1413 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1414 1414
1415 1415 # called with the repo lock held
1416 1416 #
1417 1417 # hgsep => pathname that uses "/" to separate directories
1418 1418 # ossep => pathname that uses os.sep to separate directories
1419 1419 cwd = repo.getcwd()
1420 1420 targets = {}
1421 1421 forget = opts.get(b"forget")
1422 1422 after = opts.get(b"after")
1423 1423 dryrun = opts.get(b"dry_run")
1424 1424 rev = opts.get(b'at_rev')
1425 1425 if rev:
1426 1426 if not forget and not after:
1427 1427 # TODO: Remove this restriction and make it also create the copy
1428 1428 # targets (and remove the rename source if rename==True).
1429 1429 raise error.Abort(_(b'--at-rev requires --after'))
1430 1430 ctx = scmutil.revsingle(repo, rev)
1431 1431 if len(ctx.parents()) > 1:
1432 1432 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1433 1433 else:
1434 1434 ctx = repo[None]
1435 1435
1436 1436 pctx = ctx.p1()
1437 1437
1438 1438 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1439 1439
1440 1440 if forget:
1441 1441 if ctx.rev() is None:
1442 1442 new_ctx = ctx
1443 1443 else:
1444 1444 if len(ctx.parents()) > 1:
1445 1445 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1446 1446 # avoid cycle context -> subrepo -> cmdutil
1447 1447 from . import context
1448 1448
1449 1449 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1450 1450 new_ctx = context.overlayworkingctx(repo)
1451 1451 new_ctx.setbase(ctx.p1())
1452 1452 mergemod.graft(repo, ctx, wctx=new_ctx)
1453 1453
1454 1454 match = scmutil.match(ctx, pats, opts)
1455 1455
1456 1456 current_copies = ctx.p1copies()
1457 1457 current_copies.update(ctx.p2copies())
1458 1458
1459 1459 uipathfn = scmutil.getuipathfn(repo)
1460 1460 for f in ctx.walk(match):
1461 1461 if f in current_copies:
1462 1462 new_ctx[f].markcopied(None)
1463 1463 elif match.exact(f):
1464 1464 ui.warn(
1465 1465 _(
1466 1466 b'%s: not unmarking as copy - file is not marked as copied\n'
1467 1467 )
1468 1468 % uipathfn(f)
1469 1469 )
1470 1470
1471 1471 if ctx.rev() is not None:
1472 1472 with repo.lock():
1473 1473 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1474 1474 new_node = mem_ctx.commit()
1475 1475
1476 1476 if repo.dirstate.p1() == ctx.node():
1477 1477 with repo.dirstate.parentchange():
1478 1478 scmutil.movedirstate(repo, repo[new_node])
1479 1479 replacements = {ctx.node(): [new_node]}
1480 1480 scmutil.cleanupnodes(
1481 1481 repo, replacements, b'uncopy', fixphase=True
1482 1482 )
1483 1483
1484 1484 return
1485 1485
1486 1486 pats = scmutil.expandpats(pats)
1487 1487 if not pats:
1488 1488 raise error.Abort(_(b'no source or destination specified'))
1489 1489 if len(pats) == 1:
1490 1490 raise error.Abort(_(b'no destination specified'))
1491 1491 dest = pats.pop()
1492 1492
1493 1493 def walkpat(pat):
1494 1494 srcs = []
1495 1495 m = scmutil.match(ctx, [pat], opts, globbed=True)
1496 1496 for abs in ctx.walk(m):
1497 1497 rel = uipathfn(abs)
1498 1498 exact = m.exact(abs)
1499 1499 if abs not in ctx:
1500 1500 if abs in pctx:
1501 1501 if not after:
1502 1502 if exact:
1503 1503 ui.warn(
1504 1504 _(
1505 1505 b'%s: not copying - file has been marked '
1506 1506 b'for remove\n'
1507 1507 )
1508 1508 % rel
1509 1509 )
1510 1510 continue
1511 1511 else:
1512 1512 if exact:
1513 1513 ui.warn(
1514 1514 _(b'%s: not copying - file is not managed\n') % rel
1515 1515 )
1516 1516 continue
1517 1517
1518 1518 # abs: hgsep
1519 1519 # rel: ossep
1520 1520 srcs.append((abs, rel, exact))
1521 1521 return srcs
1522 1522
1523 1523 if ctx.rev() is not None:
1524 1524 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1525 1525 absdest = pathutil.canonpath(repo.root, cwd, dest)
1526 1526 if ctx.hasdir(absdest):
1527 1527 raise error.Abort(
1528 1528 _(b'%s: --at-rev does not support a directory as destination')
1529 1529 % uipathfn(absdest)
1530 1530 )
1531 1531 if absdest not in ctx:
1532 1532 raise error.Abort(
1533 1533 _(b'%s: copy destination does not exist in %s')
1534 1534 % (uipathfn(absdest), ctx)
1535 1535 )
1536 1536
1537 1537 # avoid cycle context -> subrepo -> cmdutil
1538 1538 from . import context
1539 1539
1540 1540 copylist = []
1541 1541 for pat in pats:
1542 1542 srcs = walkpat(pat)
1543 1543 if not srcs:
1544 1544 continue
1545 1545 for abs, rel, exact in srcs:
1546 1546 copylist.append(abs)
1547 1547
1548 1548 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1549 1549 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1550 1550 # existing functions below.
1551 1551 if len(copylist) != 1:
1552 1552 raise error.Abort(_(b'--at-rev requires a single source'))
1553 1553
1554 1554 new_ctx = context.overlayworkingctx(repo)
1555 1555 new_ctx.setbase(ctx.p1())
1556 1556 mergemod.graft(repo, ctx, wctx=new_ctx)
1557 1557
1558 1558 new_ctx.markcopied(absdest, copylist[0])
1559 1559
1560 1560 with repo.lock():
1561 1561 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1562 1562 new_node = mem_ctx.commit()
1563 1563
1564 1564 if repo.dirstate.p1() == ctx.node():
1565 1565 with repo.dirstate.parentchange():
1566 1566 scmutil.movedirstate(repo, repo[new_node])
1567 1567 replacements = {ctx.node(): [new_node]}
1568 1568 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1569 1569
1570 1570 return
1571 1571
1572 1572 # abssrc: hgsep
1573 1573 # relsrc: ossep
1574 1574 # otarget: ossep
1575 1575 def copyfile(abssrc, relsrc, otarget, exact):
1576 1576 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1577 1577 if b'/' in abstarget:
1578 1578 # We cannot normalize abstarget itself, this would prevent
1579 1579 # case only renames, like a => A.
1580 1580 abspath, absname = abstarget.rsplit(b'/', 1)
1581 1581 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1582 1582 reltarget = repo.pathto(abstarget, cwd)
1583 1583 target = repo.wjoin(abstarget)
1584 1584 src = repo.wjoin(abssrc)
1585 1585 state = repo.dirstate[abstarget]
1586 1586
1587 1587 scmutil.checkportable(ui, abstarget)
1588 1588
1589 1589 # check for collisions
1590 1590 prevsrc = targets.get(abstarget)
1591 1591 if prevsrc is not None:
1592 1592 ui.warn(
1593 1593 _(b'%s: not overwriting - %s collides with %s\n')
1594 1594 % (
1595 1595 reltarget,
1596 1596 repo.pathto(abssrc, cwd),
1597 1597 repo.pathto(prevsrc, cwd),
1598 1598 )
1599 1599 )
1600 1600 return True # report a failure
1601 1601
1602 1602 # check for overwrites
1603 1603 exists = os.path.lexists(target)
1604 1604 samefile = False
1605 1605 if exists and abssrc != abstarget:
1606 1606 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1607 1607 abstarget
1608 1608 ):
1609 1609 if not rename:
1610 1610 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1611 1611 return True # report a failure
1612 1612 exists = False
1613 1613 samefile = True
1614 1614
1615 1615 if not after and exists or after and state in b'mn':
1616 1616 if not opts[b'force']:
1617 1617 if state in b'mn':
1618 1618 msg = _(b'%s: not overwriting - file already committed\n')
1619 1619 if after:
1620 1620 flags = b'--after --force'
1621 1621 else:
1622 1622 flags = b'--force'
1623 1623 if rename:
1624 1624 hint = (
1625 1625 _(
1626 1626 b"('hg rename %s' to replace the file by "
1627 1627 b'recording a rename)\n'
1628 1628 )
1629 1629 % flags
1630 1630 )
1631 1631 else:
1632 1632 hint = (
1633 1633 _(
1634 1634 b"('hg copy %s' to replace the file by "
1635 1635 b'recording a copy)\n'
1636 1636 )
1637 1637 % flags
1638 1638 )
1639 1639 else:
1640 1640 msg = _(b'%s: not overwriting - file exists\n')
1641 1641 if rename:
1642 1642 hint = _(
1643 1643 b"('hg rename --after' to record the rename)\n"
1644 1644 )
1645 1645 else:
1646 1646 hint = _(b"('hg copy --after' to record the copy)\n")
1647 1647 ui.warn(msg % reltarget)
1648 1648 ui.warn(hint)
1649 1649 return True # report a failure
1650 1650
1651 1651 if after:
1652 1652 if not exists:
1653 1653 if rename:
1654 1654 ui.warn(
1655 1655 _(b'%s: not recording move - %s does not exist\n')
1656 1656 % (relsrc, reltarget)
1657 1657 )
1658 1658 else:
1659 1659 ui.warn(
1660 1660 _(b'%s: not recording copy - %s does not exist\n')
1661 1661 % (relsrc, reltarget)
1662 1662 )
1663 1663 return True # report a failure
1664 1664 elif not dryrun:
1665 1665 try:
1666 1666 if exists:
1667 1667 os.unlink(target)
1668 1668 targetdir = os.path.dirname(target) or b'.'
1669 1669 if not os.path.isdir(targetdir):
1670 1670 os.makedirs(targetdir)
1671 1671 if samefile:
1672 1672 tmp = target + b"~hgrename"
1673 1673 os.rename(src, tmp)
1674 1674 os.rename(tmp, target)
1675 1675 else:
1676 1676 # Preserve stat info on renames, not on copies; this matches
1677 1677 # Linux CLI behavior.
1678 1678 util.copyfile(src, target, copystat=rename)
1679 1679 srcexists = True
1680 1680 except IOError as inst:
1681 1681 if inst.errno == errno.ENOENT:
1682 1682 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1683 1683 srcexists = False
1684 1684 else:
1685 1685 ui.warn(
1686 1686 _(b'%s: cannot copy - %s\n')
1687 1687 % (relsrc, encoding.strtolocal(inst.strerror))
1688 1688 )
1689 1689 return True # report a failure
1690 1690
1691 1691 if ui.verbose or not exact:
1692 1692 if rename:
1693 1693 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1694 1694 else:
1695 1695 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1696 1696
1697 1697 targets[abstarget] = abssrc
1698 1698
1699 1699 # fix up dirstate
1700 1700 scmutil.dirstatecopy(
1701 1701 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1702 1702 )
1703 1703 if rename and not dryrun:
1704 1704 if not after and srcexists and not samefile:
1705 1705 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1706 1706 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1707 1707 ctx.forget([abssrc])
1708 1708
1709 1709 # pat: ossep
1710 1710 # dest ossep
1711 1711 # srcs: list of (hgsep, hgsep, ossep, bool)
1712 1712 # return: function that takes hgsep and returns ossep
1713 1713 def targetpathfn(pat, dest, srcs):
1714 1714 if os.path.isdir(pat):
1715 1715 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1716 1716 abspfx = util.localpath(abspfx)
1717 1717 if destdirexists:
1718 1718 striplen = len(os.path.split(abspfx)[0])
1719 1719 else:
1720 1720 striplen = len(abspfx)
1721 1721 if striplen:
1722 1722 striplen += len(pycompat.ossep)
1723 1723 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1724 1724 elif destdirexists:
1725 1725 res = lambda p: os.path.join(
1726 1726 dest, os.path.basename(util.localpath(p))
1727 1727 )
1728 1728 else:
1729 1729 res = lambda p: dest
1730 1730 return res
1731 1731
1732 1732 # pat: ossep
1733 1733 # dest ossep
1734 1734 # srcs: list of (hgsep, hgsep, ossep, bool)
1735 1735 # return: function that takes hgsep and returns ossep
1736 1736 def targetpathafterfn(pat, dest, srcs):
1737 1737 if matchmod.patkind(pat):
1738 1738 # a mercurial pattern
1739 1739 res = lambda p: os.path.join(
1740 1740 dest, os.path.basename(util.localpath(p))
1741 1741 )
1742 1742 else:
1743 1743 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1744 1744 if len(abspfx) < len(srcs[0][0]):
1745 1745 # A directory. Either the target path contains the last
1746 1746 # component of the source path or it does not.
1747 1747 def evalpath(striplen):
1748 1748 score = 0
1749 1749 for s in srcs:
1750 1750 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1751 1751 if os.path.lexists(t):
1752 1752 score += 1
1753 1753 return score
1754 1754
1755 1755 abspfx = util.localpath(abspfx)
1756 1756 striplen = len(abspfx)
1757 1757 if striplen:
1758 1758 striplen += len(pycompat.ossep)
1759 1759 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1760 1760 score = evalpath(striplen)
1761 1761 striplen1 = len(os.path.split(abspfx)[0])
1762 1762 if striplen1:
1763 1763 striplen1 += len(pycompat.ossep)
1764 1764 if evalpath(striplen1) > score:
1765 1765 striplen = striplen1
1766 1766 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1767 1767 else:
1768 1768 # a file
1769 1769 if destdirexists:
1770 1770 res = lambda p: os.path.join(
1771 1771 dest, os.path.basename(util.localpath(p))
1772 1772 )
1773 1773 else:
1774 1774 res = lambda p: dest
1775 1775 return res
1776 1776
1777 1777 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1778 1778 if not destdirexists:
1779 1779 if len(pats) > 1 or matchmod.patkind(pats[0]):
1780 1780 raise error.Abort(
1781 1781 _(
1782 1782 b'with multiple sources, destination must be an '
1783 1783 b'existing directory'
1784 1784 )
1785 1785 )
1786 1786 if util.endswithsep(dest):
1787 1787 raise error.Abort(_(b'destination %s is not a directory') % dest)
1788 1788
1789 1789 tfn = targetpathfn
1790 1790 if after:
1791 1791 tfn = targetpathafterfn
1792 1792 copylist = []
1793 1793 for pat in pats:
1794 1794 srcs = walkpat(pat)
1795 1795 if not srcs:
1796 1796 continue
1797 1797 copylist.append((tfn(pat, dest, srcs), srcs))
1798 1798 if not copylist:
1799 1799 raise error.Abort(_(b'no files to copy'))
1800 1800
1801 1801 errors = 0
1802 1802 for targetpath, srcs in copylist:
1803 1803 for abssrc, relsrc, exact in srcs:
1804 1804 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1805 1805 errors += 1
1806 1806
1807 1807 return errors != 0
1808 1808
1809 1809
1810 1810 ## facility to let extension process additional data into an import patch
1811 1811 # list of identifier to be executed in order
1812 1812 extrapreimport = [] # run before commit
1813 1813 extrapostimport = [] # run after commit
1814 1814 # mapping from identifier to actual import function
1815 1815 #
1816 1816 # 'preimport' are run before the commit is made and are provided the following
1817 1817 # arguments:
1818 1818 # - repo: the localrepository instance,
1819 1819 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1820 1820 # - extra: the future extra dictionary of the changeset, please mutate it,
1821 1821 # - opts: the import options.
1822 1822 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1823 1823 # mutation of in memory commit and more. Feel free to rework the code to get
1824 1824 # there.
1825 1825 extrapreimportmap = {}
1826 1826 # 'postimport' are run after the commit is made and are provided the following
1827 1827 # argument:
1828 1828 # - ctx: the changectx created by import.
1829 1829 extrapostimportmap = {}
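# A hedged sketch of how an extension might register such a hook (the
# identifier and function names are made up):
#
#   def _recordsource(repo, patchdata, extra, opts):
#       extra[b'imported-from'] = patchdata.get(b'filename', b'')
#
#   cmdutil.extrapreimport.append(b'recordsource')
#   cmdutil.extrapreimportmap[b'recordsource'] = _recordsource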
1830 1830
1831 1831
1832 1832 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1833 1833 """Utility function used by commands.import to import a single patch
1834 1834
1835 1835 This function is explicitly defined here to help the evolve extension to
1836 1836 wrap this part of the import logic.
1837 1837
1838 1838 The API is currently a bit ugly because it is a simple code translation from
1839 1839 the import command. Feel free to make it better.
1840 1840
1841 1841 :patchdata: a dictionary containing parsed patch data (such as from
1842 1842 ``patch.extract()``)
1843 1843 :parents: nodes that will be the parents of the created commit
1844 1844 :opts: the full dict of options passed to the import command
1845 1845 :msgs: list to save commit message to.
1846 1846 (used in case we need to save it when failing)
1847 1847 :updatefunc: a function that updates a repo to a given node
1848 1848 updatefunc(<repo>, <node>)
1849 1849 """
1850 1850 # avoid cycle context -> subrepo -> cmdutil
1851 1851 from . import context
1852 1852
1853 1853 tmpname = patchdata.get(b'filename')
1854 1854 message = patchdata.get(b'message')
1855 1855 user = opts.get(b'user') or patchdata.get(b'user')
1856 1856 date = opts.get(b'date') or patchdata.get(b'date')
1857 1857 branch = patchdata.get(b'branch')
1858 1858 nodeid = patchdata.get(b'nodeid')
1859 1859 p1 = patchdata.get(b'p1')
1860 1860 p2 = patchdata.get(b'p2')
1861 1861
1862 1862 nocommit = opts.get(b'no_commit')
1863 1863 importbranch = opts.get(b'import_branch')
1864 1864 update = not opts.get(b'bypass')
1865 1865 strip = opts[b"strip"]
1866 1866 prefix = opts[b"prefix"]
1867 1867 sim = float(opts.get(b'similarity') or 0)
1868 1868
1869 1869 if not tmpname:
1870 1870 return None, None, False
1871 1871
1872 1872 rejects = False
1873 1873
1874 1874 cmdline_message = logmessage(ui, opts)
1875 1875 if cmdline_message:
1876 1876 # pickup the cmdline msg
1877 1877 message = cmdline_message
1878 1878 elif message:
1879 1879 # pickup the patch msg
1880 1880 message = message.strip()
1881 1881 else:
1882 1882 # launch the editor
1883 1883 message = None
1884 1884 ui.debug(b'message:\n%s\n' % (message or b''))
1885 1885
1886 1886 if len(parents) == 1:
1887 1887 parents.append(repo[nullid])
1888 1888 if opts.get(b'exact'):
1889 1889 if not nodeid or not p1:
1890 1890 raise error.Abort(_(b'not a Mercurial patch'))
1891 1891 p1 = repo[p1]
1892 1892 p2 = repo[p2 or nullid]
1893 1893 elif p2:
1894 1894 try:
1895 1895 p1 = repo[p1]
1896 1896 p2 = repo[p2]
1897 1897 # Without any options, consider p2 only if the
1898 1898 # patch is being applied on top of the recorded
1899 1899 # first parent.
1900 1900 if p1 != parents[0]:
1901 1901 p1 = parents[0]
1902 1902 p2 = repo[nullid]
1903 1903 except error.RepoError:
1904 1904 p1, p2 = parents
1905 1905 if p2.node() == nullid:
1906 1906 ui.warn(
1907 1907 _(
1908 1908 b"warning: import the patch as a normal revision\n"
1909 1909 b"(use --exact to import the patch as a merge)\n"
1910 1910 )
1911 1911 )
1912 1912 else:
1913 1913 p1, p2 = parents
1914 1914
1915 1915 n = None
1916 1916 if update:
1917 1917 if p1 != parents[0]:
1918 1918 updatefunc(repo, p1.node())
1919 1919 if p2 != parents[1]:
1920 1920 repo.setparents(p1.node(), p2.node())
1921 1921
1922 1922 if opts.get(b'exact') or importbranch:
1923 1923 repo.dirstate.setbranch(branch or b'default')
1924 1924
1925 1925 partial = opts.get(b'partial', False)
1926 1926 files = set()
1927 1927 try:
1928 1928 patch.patch(
1929 1929 ui,
1930 1930 repo,
1931 1931 tmpname,
1932 1932 strip=strip,
1933 1933 prefix=prefix,
1934 1934 files=files,
1935 1935 eolmode=None,
1936 1936 similarity=sim / 100.0,
1937 1937 )
1938 1938 except error.PatchError as e:
1939 1939 if not partial:
1940 1940 raise error.Abort(pycompat.bytestr(e))
1941 1941 if partial:
1942 1942 rejects = True
1943 1943
1944 1944 files = list(files)
1945 1945 if nocommit:
1946 1946 if message:
1947 1947 msgs.append(message)
1948 1948 else:
1949 1949 if opts.get(b'exact') or p2:
1950 1950 # If you got here, you either used --force and know what
1951 1951 # you are doing, or used --exact or a merge patch while
1952 1952 # being updated to its first parent.
1953 1953 m = None
1954 1954 else:
1955 1955 m = scmutil.matchfiles(repo, files or [])
1956 1956 editform = mergeeditform(repo[None], b'import.normal')
1957 1957 if opts.get(b'exact'):
1958 1958 editor = None
1959 1959 else:
1960 1960 editor = getcommiteditor(
1961 1961 editform=editform, **pycompat.strkwargs(opts)
1962 1962 )
1963 1963 extra = {}
1964 1964 for idfunc in extrapreimport:
1965 1965 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1966 1966 overrides = {}
1967 1967 if partial:
1968 1968 overrides[(b'ui', b'allowemptycommit')] = True
1969 1969 if opts.get(b'secret'):
1970 1970 overrides[(b'phases', b'new-commit')] = b'secret'
1971 1971 with repo.ui.configoverride(overrides, b'import'):
1972 1972 n = repo.commit(
1973 1973 message, user, date, match=m, editor=editor, extra=extra
1974 1974 )
1975 1975 for idfunc in extrapostimport:
1976 1976 extrapostimportmap[idfunc](repo[n])
1977 1977 else:
1978 1978 if opts.get(b'exact') or importbranch:
1979 1979 branch = branch or b'default'
1980 1980 else:
1981 1981 branch = p1.branch()
1982 1982 store = patch.filestore()
1983 1983 try:
1984 1984 files = set()
1985 1985 try:
1986 1986 patch.patchrepo(
1987 1987 ui,
1988 1988 repo,
1989 1989 p1,
1990 1990 store,
1991 1991 tmpname,
1992 1992 strip,
1993 1993 prefix,
1994 1994 files,
1995 1995 eolmode=None,
1996 1996 )
1997 1997 except error.PatchError as e:
1998 1998 raise error.Abort(stringutil.forcebytestr(e))
1999 1999 if opts.get(b'exact'):
2000 2000 editor = None
2001 2001 else:
2002 2002 editor = getcommiteditor(editform=b'import.bypass')
2003 2003 memctx = context.memctx(
2004 2004 repo,
2005 2005 (p1.node(), p2.node()),
2006 2006 message,
2007 2007 files=files,
2008 2008 filectxfn=store,
2009 2009 user=user,
2010 2010 date=date,
2011 2011 branch=branch,
2012 2012 editor=editor,
2013 2013 )
2014 n = memctx.commit()
2014
2015 overrides = {}
2016 if opts.get(b'secret'):
2017 overrides[(b'phases', b'new-commit')] = b'secret'
2018 with repo.ui.configoverride(overrides, b'import'):
2019 n = memctx.commit()
2015 2020 finally:
2016 2021 store.close()
2017 2022 if opts.get(b'exact') and nocommit:
2018 2023 # --exact with --no-commit is still useful in that it does merge
2019 2024 # and branch bits
2020 2025 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2021 2026 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2022 2027 raise error.Abort(_(b'patch is damaged or loses information'))
2023 2028 msg = _(b'applied to working directory')
2024 2029 if n:
2025 2030 # i18n: refers to a short changeset id
2026 2031 msg = _(b'created %s') % short(n)
2027 2032 return msg, n, rejects
2028 2033
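For orientation, a caller of tryimportone() follows roughly the pattern below. This is a simplified, hypothetical sketch: locking, transactions, error handling and the usual patch.extract() plumbing are all omitted, and the hand-built patchdata dict, the helper name and the patch path are assumptions, not how commands.import is actually wired.

    from mercurial import cmdutil, hg

    def importonepatch(ui, repo, patchpath):
        # patchdata would normally come from patch.extract(); built by hand here
        patchdata = {
            b'filename': patchpath,  # file holding the raw patch text
            b'message': b'example import',
            b'user': b'someone@example.com',
        }
        opts = {b'strip': 1, b'prefix': b''}  # required keys; the rest default via .get()
        msgs = []

        def updatefunc(repo, node):
            hg.update(repo, node)

        msg, node, rejects = cmdutil.tryimportone(
            ui, repo, patchdata, list(repo[None].parents()), opts, msgs, updatefunc
        )
        return node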
2029 2034
2030 2035 # facility to let extensions include additional data in an exported patch
2031 2036 # list of identifiers to be executed in order
2032 2037 extraexport = []
2033 2038 # mapping from identifier to actual export function
2034 2039 # function has to return a string to be added to the header, or None
2035 2040 # it is given two arguments (sequencenumber, changectx)
2036 2041 extraexportmap = {}
2037 2042
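Mirroring the import hooks earlier in this file, an extension registers an export header by appending an identifier and mapping it to a callable taking (seqno, ctx). A minimal hypothetical sketch; the 'patchindex' identifier and the header text are assumptions:

    from mercurial import cmdutil

    def _indexheader(seqno, ctx):
        # returning None would suppress the header entirely
        return b'Patch-Index %d' % seqno

    cmdutil.extraexport.append(b'patchindex')
    cmdutil.extraexportmap[b'patchindex'] = _indexheader

With this registered, each exported patch gains a '# Patch-Index N' line in its header, since _exportsingle() prefixes the returned string with '# '.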
2038 2043
2039 2044 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2040 2045 node = scmutil.binnode(ctx)
2041 2046 parents = [p.node() for p in ctx.parents() if p]
2042 2047 branch = ctx.branch()
2043 2048 if switch_parent:
2044 2049 parents.reverse()
2045 2050
2046 2051 if parents:
2047 2052 prev = parents[0]
2048 2053 else:
2049 2054 prev = nullid
2050 2055
2051 2056 fm.context(ctx=ctx)
2052 2057 fm.plain(b'# HG changeset patch\n')
2053 2058 fm.write(b'user', b'# User %s\n', ctx.user())
2054 2059 fm.plain(b'# Date %d %d\n' % ctx.date())
2055 2060 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2056 2061 fm.condwrite(
2057 2062 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2058 2063 )
2059 2064 fm.write(b'node', b'# Node ID %s\n', hex(node))
2060 2065 fm.plain(b'# Parent %s\n' % hex(prev))
2061 2066 if len(parents) > 1:
2062 2067 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2063 2068 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2064 2069
2065 2070 # TODO: redesign extraexportmap function to support formatter
2066 2071 for headerid in extraexport:
2067 2072 header = extraexportmap[headerid](seqno, ctx)
2068 2073 if header is not None:
2069 2074 fm.plain(b'# %s\n' % header)
2070 2075
2071 2076 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2072 2077 fm.plain(b'\n')
2073 2078
2074 2079 if fm.isplain():
2075 2080 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2076 2081 for chunk, label in chunkiter:
2077 2082 fm.plain(chunk, label=label)
2078 2083 else:
2079 2084 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2080 2085 # TODO: make it structured?
2081 2086 fm.data(diff=b''.join(chunkiter))
2082 2087
2083 2088
2084 2089 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2085 2090 """Export changesets to stdout or a single file"""
2086 2091 for seqno, rev in enumerate(revs, 1):
2087 2092 ctx = repo[rev]
2088 2093 if not dest.startswith(b'<'):
2089 2094 repo.ui.note(b"%s\n" % dest)
2090 2095 fm.startitem()
2091 2096 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2092 2097
2093 2098
2094 2099 def _exportfntemplate(
2095 2100 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2096 2101 ):
2097 2102 """Export changesets to possibly multiple files"""
2098 2103 total = len(revs)
2099 2104 revwidth = max(len(str(rev)) for rev in revs)
2100 2105 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2101 2106
2102 2107 for seqno, rev in enumerate(revs, 1):
2103 2108 ctx = repo[rev]
2104 2109 dest = makefilename(
2105 2110 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2106 2111 )
2107 2112 filemap.setdefault(dest, []).append((seqno, rev))
2108 2113
2109 2114 for dest in filemap:
2110 2115 with formatter.maybereopen(basefm, dest) as fm:
2111 2116 repo.ui.note(b"%s\n" % dest)
2112 2117 for seqno, rev in filemap[dest]:
2113 2118 fm.startitem()
2114 2119 ctx = repo[rev]
2115 2120 _exportsingle(
2116 2121 repo, ctx, fm, match, switch_parent, seqno, diffopts
2117 2122 )
2118 2123
2119 2124
2120 2125 def _prefetchchangedfiles(repo, revs, match):
2121 2126 allfiles = set()
2122 2127 for rev in revs:
2123 2128 for file in repo[rev].files():
2124 2129 if not match or match(file):
2125 2130 allfiles.add(file)
2126 2131 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
2127 2132
2128 2133
2129 2134 def export(
2130 2135 repo,
2131 2136 revs,
2132 2137 basefm,
2133 2138 fntemplate=b'hg-%h.patch',
2134 2139 switch_parent=False,
2135 2140 opts=None,
2136 2141 match=None,
2137 2142 ):
2138 2143 '''export changesets as hg patches
2139 2144
2140 2145 Args:
2141 2146 repo: The repository from which we're exporting revisions.
2142 2147 revs: A list of revisions to export as revision numbers.
2143 2148 basefm: A formatter to which patches should be written.
2144 2149 fntemplate: An optional string to use for generating patch file names.
2145 2150 switch_parent: If True, show diffs against second parent when not nullid.
2146 2151 Default is false, which always shows diff against p1.
2147 2152 opts: diff options to use for generating the patch.
2148 2153 match: If specified, only export changes to files matching this matcher.
2149 2154
2150 2155 Returns:
2151 2156 Nothing.
2152 2157
2153 2158 Side Effect:
2154 2159 "HG Changeset Patch" data is emitted to one of the following
2155 2160 destinations:
2156 2161 fntemplate specified: Each rev is written to a unique file named using
2157 2162 the given template.
2158 2163 Otherwise: All revs will be written to basefm.
2159 2164 '''
2160 2165 _prefetchchangedfiles(repo, revs, match)
2161 2166
2162 2167 if not fntemplate:
2163 2168 _exportfile(
2164 2169 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2165 2170 )
2166 2171 else:
2167 2172 _exportfntemplate(
2168 2173 repo, revs, basefm, fntemplate, switch_parent, opts, match
2169 2174 )
2170 2175
2171 2176
2172 2177 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2173 2178 """Export changesets to the given file stream"""
2174 2179 _prefetchchangedfiles(repo, revs, match)
2175 2180
2176 2181 dest = getattr(fp, 'name', b'<unnamed>')
2177 2182 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2178 2183 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2179 2184
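A minimal sketch of driving the export helpers from a script or extension, assuming a repo object is already in hand and that the default diff options are acceptable (opts and match are left at None). exportfile() is the simplest entry point because it builds the formatter itself; the helper name and output path are made up.

    from mercurial import cmdutil

    def dumptip(repo, path=b'tip.patch'):
        # write the tip changeset as a single patch file
        with open(path, 'wb') as fp:
            cmdutil.exportfile(repo, [repo[b'tip'].rev()], fp)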
2180 2185
2181 2186 def showmarker(fm, marker, index=None):
2182 2187 """utility function to display obsolescence marker in a readable way
2183 2188
2184 2189 To be used by debug function."""
2185 2190 if index is not None:
2186 2191 fm.write(b'index', b'%i ', index)
2187 2192 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2188 2193 succs = marker.succnodes()
2189 2194 fm.condwrite(
2190 2195 succs,
2191 2196 b'succnodes',
2192 2197 b'%s ',
2193 2198 fm.formatlist(map(hex, succs), name=b'node'),
2194 2199 )
2195 2200 fm.write(b'flag', b'%X ', marker.flags())
2196 2201 parents = marker.parentnodes()
2197 2202 if parents is not None:
2198 2203 fm.write(
2199 2204 b'parentnodes',
2200 2205 b'{%s} ',
2201 2206 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2202 2207 )
2203 2208 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2204 2209 meta = marker.metadata().copy()
2205 2210 meta.pop(b'date', None)
2206 2211 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2207 2212 fm.write(
2208 2213 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2209 2214 )
2210 2215 fm.plain(b'\n')
2211 2216
2212 2217
2213 2218 def finddate(ui, repo, date):
2214 2219 """Find the tipmost changeset that matches the given date spec"""
2215 2220
2216 2221 df = dateutil.matchdate(date)
2217 2222 m = scmutil.matchall(repo)
2218 2223 results = {}
2219 2224
2220 2225 def prep(ctx, fns):
2221 2226 d = ctx.date()
2222 2227 if df(d[0]):
2223 2228 results[ctx.rev()] = d
2224 2229
2225 2230 for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
2226 2231 rev = ctx.rev()
2227 2232 if rev in results:
2228 2233 ui.status(
2229 2234 _(b"found revision %d from %s\n")
2230 2235 % (rev, dateutil.datestr(results[rev]))
2231 2236 )
2232 2237 return b'%d' % rev
2233 2238
2234 2239 raise error.Abort(_(b"revision matching date not found"))
2235 2240
2236 2241
2237 2242 def increasingwindows(windowsize=8, sizelimit=512):
2238 2243 while True:
2239 2244 yield windowsize
2240 2245 if windowsize < sizelimit:
2241 2246 windowsize *= 2
2242 2247
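To make the windowing concrete: with the defaults above, the generator yields 8, 16, 32, ..., doubling until it hits the 512 cap and then repeating 512 forever. A purely illustrative check:

    import itertools
    from mercurial import cmdutil

    sizes = list(itertools.islice(cmdutil.increasingwindows(), 9))
    # sizes == [8, 16, 32, 64, 128, 256, 512, 512, 512]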
2243 2248
2244 2249 def _walkrevs(repo, opts):
2245 2250 # Default --rev value depends on --follow but --follow behavior
2246 2251 # depends on revisions resolved from --rev...
2247 2252 follow = opts.get(b'follow') or opts.get(b'follow_first')
2248 2253 if opts.get(b'rev'):
2249 2254 revs = scmutil.revrange(repo, opts[b'rev'])
2250 2255 elif follow and repo.dirstate.p1() == nullid:
2251 2256 revs = smartset.baseset()
2252 2257 elif follow:
2253 2258 revs = repo.revs(b'reverse(:.)')
2254 2259 else:
2255 2260 revs = smartset.spanset(repo)
2256 2261 revs.reverse()
2257 2262 return revs
2258 2263
2259 2264
2260 2265 class FileWalkError(Exception):
2261 2266 pass
2262 2267
2263 2268
2264 2269 def walkfilerevs(repo, match, follow, revs, fncache):
2265 2270 '''Walks the file history for the matched files.
2266 2271
2267 2272 Returns the changeset revs that are involved in the file history.
2268 2273
2269 2274 Throws FileWalkError if the file history can't be walked using
2270 2275 filelogs alone.
2271 2276 '''
2272 2277 wanted = set()
2273 2278 copies = []
2274 2279 minrev, maxrev = min(revs), max(revs)
2275 2280
2276 2281 def filerevs(filelog, last):
2277 2282 """
2278 2283 Only files, no patterns. Check the history of each file.
2279 2284
2280 2285 Examines filelog entries within minrev, maxrev linkrev range
2281 2286 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2282 2287 tuples in backwards order
2283 2288 """
2284 2289 cl_count = len(repo)
2285 2290 revs = []
2286 2291 for j in pycompat.xrange(0, last + 1):
2287 2292 linkrev = filelog.linkrev(j)
2288 2293 if linkrev < minrev:
2289 2294 continue
2290 2295 # only yield revs for which we have the changelog; this can
2291 2296 # happen while doing "hg log" during a pull or commit
2292 2297 if linkrev >= cl_count:
2293 2298 break
2294 2299
2295 2300 parentlinkrevs = []
2296 2301 for p in filelog.parentrevs(j):
2297 2302 if p != nullrev:
2298 2303 parentlinkrevs.append(filelog.linkrev(p))
2299 2304 n = filelog.node(j)
2300 2305 revs.append(
2301 2306 (linkrev, parentlinkrevs, follow and filelog.renamed(n))
2302 2307 )
2303 2308
2304 2309 return reversed(revs)
2305 2310
2306 2311 def iterfiles():
2307 2312 pctx = repo[b'.']
2308 2313 for filename in match.files():
2309 2314 if follow:
2310 2315 if filename not in pctx:
2311 2316 raise error.Abort(
2312 2317 _(
2313 2318 b'cannot follow file not in parent '
2314 2319 b'revision: "%s"'
2315 2320 )
2316 2321 % filename
2317 2322 )
2318 2323 yield filename, pctx[filename].filenode()
2319 2324 else:
2320 2325 yield filename, None
2321 2326 for filename_node in copies:
2322 2327 yield filename_node
2323 2328
2324 2329 for file_, node in iterfiles():
2325 2330 filelog = repo.file(file_)
2326 2331 if not len(filelog):
2327 2332 if node is None:
2328 2333 # A zero count may be a directory or deleted file, so
2329 2334 # try to find matching entries on the slow path.
2330 2335 if follow:
2331 2336 raise error.Abort(
2332 2337 _(b'cannot follow nonexistent file: "%s"') % file_
2333 2338 )
2334 2339 raise FileWalkError(b"Cannot walk via filelog")
2335 2340 else:
2336 2341 continue
2337 2342
2338 2343 if node is None:
2339 2344 last = len(filelog) - 1
2340 2345 else:
2341 2346 last = filelog.rev(node)
2342 2347
2343 2348 # keep track of all ancestors of the file
2344 2349 ancestors = {filelog.linkrev(last)}
2345 2350
2346 2351 # iterate from latest to oldest revision
2347 2352 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
2348 2353 if not follow:
2349 2354 if rev > maxrev:
2350 2355 continue
2351 2356 else:
2352 2357 # Note that last might not be the first interesting
2353 2358 # rev to us:
2354 2359 # if the file has been changed after maxrev, we'll
2355 2360 # have linkrev(last) > maxrev, and we still need
2356 2361 # to explore the file graph
2357 2362 if rev not in ancestors:
2358 2363 continue
2359 2364 # XXX insert 1327 fix here
2360 2365 if flparentlinkrevs:
2361 2366 ancestors.update(flparentlinkrevs)
2362 2367
2363 2368 fncache.setdefault(rev, []).append(file_)
2364 2369 wanted.add(rev)
2365 2370 if copied:
2366 2371 copies.append(copied)
2367 2372
2368 2373 return wanted
2369 2374
2370 2375
2371 2376 class _followfilter(object):
2372 2377 def __init__(self, repo, onlyfirst=False):
2373 2378 self.repo = repo
2374 2379 self.startrev = nullrev
2375 2380 self.roots = set()
2376 2381 self.onlyfirst = onlyfirst
2377 2382
2378 2383 def match(self, rev):
2379 2384 def realparents(rev):
2380 2385 if self.onlyfirst:
2381 2386 return self.repo.changelog.parentrevs(rev)[0:1]
2382 2387 else:
2383 2388 return filter(
2384 2389 lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
2385 2390 )
2386 2391
2387 2392 if self.startrev == nullrev:
2388 2393 self.startrev = rev
2389 2394 return True
2390 2395
2391 2396 if rev > self.startrev:
2392 2397 # forward: all descendants
2393 2398 if not self.roots:
2394 2399 self.roots.add(self.startrev)
2395 2400 for parent in realparents(rev):
2396 2401 if parent in self.roots:
2397 2402 self.roots.add(rev)
2398 2403 return True
2399 2404 else:
2400 2405 # backwards: all parents
2401 2406 if not self.roots:
2402 2407 self.roots.update(realparents(self.startrev))
2403 2408 if rev in self.roots:
2404 2409 self.roots.remove(rev)
2405 2410 self.roots.update(realparents(rev))
2406 2411 return True
2407 2412
2408 2413 return False
2409 2414
2410 2415
2411 2416 def walkchangerevs(repo, match, opts, prepare):
2412 2417 '''Iterate over files and the revs in which they changed.
2413 2418
2414 2419 Callers most commonly need to iterate backwards over the history
2415 2420 in which they are interested. Doing so has awful (quadratic-looking)
2416 2421 performance, so we use iterators in a "windowed" way.
2417 2422
2418 2423 We walk a window of revisions in the desired order. Within the
2419 2424 window, we first walk forwards to gather data, then in the desired
2420 2425 order (usually backwards) to display it.
2421 2426
2422 2427 This function returns an iterator yielding contexts. Before
2423 2428 yielding each context, the iterator will first call the prepare
2424 2429 function on each context in the window in forward order.'''
2425 2430
2426 2431 allfiles = opts.get(b'all_files')
2427 2432 follow = opts.get(b'follow') or opts.get(b'follow_first')
2428 2433 revs = _walkrevs(repo, opts)
2429 2434 if not revs:
2430 2435 return []
2431 2436 wanted = set()
2432 2437 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
2433 2438 fncache = {}
2434 2439 change = repo.__getitem__
2435 2440
2436 2441 # First step is to fill wanted, the set of revisions that we want to yield.
2437 2442 # When it does not induce extra cost, we also fill fncache for revisions in
2438 2443 # wanted: a cache of filenames that were changed (ctx.files()) and that
2439 2444 # match the file filtering conditions.
2440 2445
2441 2446 if match.always() or allfiles:
2442 2447 # No files, no patterns. Display all revs.
2443 2448 wanted = revs
2444 2449 elif not slowpath:
2445 2450 # We only have to read through the filelog to find wanted revisions
2446 2451
2447 2452 try:
2448 2453 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2449 2454 except FileWalkError:
2450 2455 slowpath = True
2451 2456
2452 2457 # We decided to fall back to the slowpath because at least one
2453 2458 # of the paths was not a file. Check to see if at least one of them
2454 2459 # existed in history, otherwise simply return
2455 2460 for path in match.files():
2456 2461 if path == b'.' or path in repo.store:
2457 2462 break
2458 2463 else:
2459 2464 return []
2460 2465
2461 2466 if slowpath:
2462 2467 # We have to read the changelog to match filenames against
2463 2468 # changed files
2464 2469
2465 2470 if follow:
2466 2471 raise error.Abort(
2467 2472 _(b'can only follow copies/renames for explicit filenames')
2468 2473 )
2469 2474
2470 2475 # The slow path checks files modified in every changeset.
2471 2476 # This is really slow on large repos, so compute the set lazily.
2472 2477 class lazywantedset(object):
2473 2478 def __init__(self):
2474 2479 self.set = set()
2475 2480 self.revs = set(revs)
2476 2481
2477 2482 # No need to worry about locality here because it will be accessed
2478 2483 # in the same order as the increasing window below.
2479 2484 def __contains__(self, value):
2480 2485 if value in self.set:
2481 2486 return True
2482 2487 elif value not in self.revs:
2483 2488 return False
2484 2489 else:
2485 2490 self.revs.discard(value)
2486 2491 ctx = change(value)
2487 2492 if allfiles:
2488 2493 matches = list(ctx.manifest().walk(match))
2489 2494 else:
2490 2495 matches = [f for f in ctx.files() if match(f)]
2491 2496 if matches:
2492 2497 fncache[value] = matches
2493 2498 self.set.add(value)
2494 2499 return True
2495 2500 return False
2496 2501
2497 2502 def discard(self, value):
2498 2503 self.revs.discard(value)
2499 2504 self.set.discard(value)
2500 2505
2501 2506 wanted = lazywantedset()
2502 2507
2503 2508 # it might be worthwhile to do this in the iterator if the rev range
2504 2509 # is descending and the prune args are all within that range
2505 2510 for rev in opts.get(b'prune', ()):
2506 2511 rev = repo[rev].rev()
2507 2512 ff = _followfilter(repo)
2508 2513 stop = min(revs[0], revs[-1])
2509 2514 for x in pycompat.xrange(rev, stop - 1, -1):
2510 2515 if ff.match(x):
2511 2516 wanted = wanted - [x]
2512 2517
2513 2518 # Now that wanted is correctly initialized, we can iterate over the
2514 2519 # revision range, yielding only revisions in wanted.
2515 2520 def iterate():
2516 2521 if follow and match.always():
2517 2522 ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
2518 2523
2519 2524 def want(rev):
2520 2525 return ff.match(rev) and rev in wanted
2521 2526
2522 2527 else:
2523 2528
2524 2529 def want(rev):
2525 2530 return rev in wanted
2526 2531
2527 2532 it = iter(revs)
2528 2533 stopiteration = False
2529 2534 for windowsize in increasingwindows():
2530 2535 nrevs = []
2531 2536 for i in pycompat.xrange(windowsize):
2532 2537 rev = next(it, None)
2533 2538 if rev is None:
2534 2539 stopiteration = True
2535 2540 break
2536 2541 elif want(rev):
2537 2542 nrevs.append(rev)
2538 2543 for rev in sorted(nrevs):
2539 2544 fns = fncache.get(rev)
2540 2545 ctx = change(rev)
2541 2546 if not fns:
2542 2547
2543 2548 def fns_generator():
2544 2549 if allfiles:
2545 2550
2546 2551 def bad(f, msg):
2547 2552 pass
2548 2553
2549 2554 for f in ctx.matches(matchmod.badmatch(match, bad)):
2550 2555 yield f
2551 2556 else:
2552 2557 for f in ctx.files():
2553 2558 if match(f):
2554 2559 yield f
2555 2560
2556 2561 fns = fns_generator()
2557 2562 prepare(ctx, fns)
2558 2563 for rev in nrevs:
2559 2564 yield change(rev)
2560 2565
2561 2566 if stopiteration:
2562 2567 break
2563 2568
2564 2569 return iterate()
2565 2570
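The prepare/iterate contract described in the docstring is easiest to see from a caller; finddate() above is the in-tree example. A stripped-down sketch of the same pattern, with a hypothetical helper name and assuming a repo object is available:

    from mercurial import cmdutil, scmutil

    def collectchangedfiles(repo):
        seen = {}

        def prepare(ctx, fns):
            # fns iterates the matched files changed by ctx; called window by window
            seen[ctx.rev()] = list(fns)

        m = scmutil.matchall(repo)
        for ctx in cmdutil.walkchangerevs(repo, m, {b'rev': None}, prepare):
            # by the time a context is yielded, prepare() has run for its window
            pass
        return seen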
2566 2571
2567 2572 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2568 2573 bad = []
2569 2574
2570 2575 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2571 2576 names = []
2572 2577 wctx = repo[None]
2573 2578 cca = None
2574 2579 abort, warn = scmutil.checkportabilityalert(ui)
2575 2580 if abort or warn:
2576 2581 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2577 2582
2578 2583 match = repo.narrowmatch(match, includeexact=True)
2579 2584 badmatch = matchmod.badmatch(match, badfn)
2580 2585 dirstate = repo.dirstate
2581 2586 # We don't want to just call wctx.walk here, since it would return a lot of
2582 2587 # clean files, which we aren't interested in, and doing so takes time.
2583 2588 for f in sorted(
2584 2589 dirstate.walk(
2585 2590 badmatch,
2586 2591 subrepos=sorted(wctx.substate),
2587 2592 unknown=True,
2588 2593 ignored=False,
2589 2594 full=False,
2590 2595 )
2591 2596 ):
2592 2597 exact = match.exact(f)
2593 2598 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2594 2599 if cca:
2595 2600 cca(f)
2596 2601 names.append(f)
2597 2602 if ui.verbose or not exact:
2598 2603 ui.status(
2599 2604 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2600 2605 )
2601 2606
2602 2607 for subpath in sorted(wctx.substate):
2603 2608 sub = wctx.sub(subpath)
2604 2609 try:
2605 2610 submatch = matchmod.subdirmatcher(subpath, match)
2606 2611 subprefix = repo.wvfs.reljoin(prefix, subpath)
2607 2612 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2608 2613 if opts.get('subrepos'):
2609 2614 bad.extend(
2610 2615 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2611 2616 )
2612 2617 else:
2613 2618 bad.extend(
2614 2619 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2615 2620 )
2616 2621 except error.LookupError:
2617 2622 ui.status(
2618 2623 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2619 2624 )
2620 2625
2621 2626 if not opts.get('dry_run'):
2622 2627 rejected = wctx.add(names, prefix)
2623 2628 bad.extend(f for f in rejected if f in match.files())
2624 2629 return bad
2625 2630
2626 2631
2627 2632 def addwebdirpath(repo, serverpath, webconf):
2628 2633 webconf[serverpath] = repo.root
2629 2634 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2630 2635
2631 2636 for r in repo.revs(b'filelog("path:.hgsub")'):
2632 2637 ctx = repo[r]
2633 2638 for subpath in ctx.substate:
2634 2639 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2635 2640
2636 2641
2637 2642 def forget(
2638 2643 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2639 2644 ):
2640 2645 if dryrun and interactive:
2641 2646 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2642 2647 bad = []
2643 2648 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2644 2649 wctx = repo[None]
2645 2650 forgot = []
2646 2651
2647 2652 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2648 2653 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2649 2654 if explicitonly:
2650 2655 forget = [f for f in forget if match.exact(f)]
2651 2656
2652 2657 for subpath in sorted(wctx.substate):
2653 2658 sub = wctx.sub(subpath)
2654 2659 submatch = matchmod.subdirmatcher(subpath, match)
2655 2660 subprefix = repo.wvfs.reljoin(prefix, subpath)
2656 2661 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2657 2662 try:
2658 2663 subbad, subforgot = sub.forget(
2659 2664 submatch,
2660 2665 subprefix,
2661 2666 subuipathfn,
2662 2667 dryrun=dryrun,
2663 2668 interactive=interactive,
2664 2669 )
2665 2670 bad.extend([subpath + b'/' + f for f in subbad])
2666 2671 forgot.extend([subpath + b'/' + f for f in subforgot])
2667 2672 except error.LookupError:
2668 2673 ui.status(
2669 2674 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2670 2675 )
2671 2676
2672 2677 if not explicitonly:
2673 2678 for f in match.files():
2674 2679 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2675 2680 if f not in forgot:
2676 2681 if repo.wvfs.exists(f):
2677 2682 # Don't complain if the exact case match wasn't given.
2678 2683 # But don't do this until after checking 'forgot', so
2679 2684 # that subrepo files aren't normalized, and this op is
2680 2685 # purely from data cached by the status walk above.
2681 2686 if repo.dirstate.normalize(f) in repo.dirstate:
2682 2687 continue
2683 2688 ui.warn(
2684 2689 _(
2685 2690 b'not removing %s: '
2686 2691 b'file is already untracked\n'
2687 2692 )
2688 2693 % uipathfn(f)
2689 2694 )
2690 2695 bad.append(f)
2691 2696
2692 2697 if interactive:
2693 2698 responses = _(
2694 2699 b'[Ynsa?]'
2695 2700 b'$$ &Yes, forget this file'
2696 2701 b'$$ &No, skip this file'
2697 2702 b'$$ &Skip remaining files'
2698 2703 b'$$ Include &all remaining files'
2699 2704 b'$$ &? (display help)'
2700 2705 )
2701 2706 for filename in forget[:]:
2702 2707 r = ui.promptchoice(
2703 2708 _(b'forget %s %s') % (uipathfn(filename), responses)
2704 2709 )
2705 2710 if r == 4: # ?
2706 2711 while r == 4:
2707 2712 for c, t in ui.extractchoices(responses)[1]:
2708 2713 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2709 2714 r = ui.promptchoice(
2710 2715 _(b'forget %s %s') % (uipathfn(filename), responses)
2711 2716 )
2712 2717 if r == 0: # yes
2713 2718 continue
2714 2719 elif r == 1: # no
2715 2720 forget.remove(filename)
2716 2721 elif r == 2: # Skip
2717 2722 fnindex = forget.index(filename)
2718 2723 del forget[fnindex:]
2719 2724 break
2720 2725 elif r == 3: # All
2721 2726 break
2722 2727
2723 2728 for f in forget:
2724 2729 if ui.verbose or not match.exact(f) or interactive:
2725 2730 ui.status(
2726 2731 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2727 2732 )
2728 2733
2729 2734 if not dryrun:
2730 2735 rejected = wctx.forget(forget, prefix)
2731 2736 bad.extend(f for f in rejected if f in match.files())
2732 2737 forgot.extend(f for f in forget if f not in rejected)
2733 2738 return bad, forgot
2734 2739
2735 2740
2736 2741 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2737 2742 ret = 1
2738 2743
2739 2744 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2740 2745 for f in ctx.matches(m):
2741 2746 fm.startitem()
2742 2747 fm.context(ctx=ctx)
2743 2748 if needsfctx:
2744 2749 fc = ctx[f]
2745 2750 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2746 2751 fm.data(path=f)
2747 2752 fm.plain(fmt % uipathfn(f))
2748 2753 ret = 0
2749 2754
2750 2755 for subpath in sorted(ctx.substate):
2751 2756 submatch = matchmod.subdirmatcher(subpath, m)
2752 2757 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2753 2758 if subrepos or m.exact(subpath) or any(submatch.files()):
2754 2759 sub = ctx.sub(subpath)
2755 2760 try:
2756 2761 recurse = m.exact(subpath) or subrepos
2757 2762 if (
2758 2763 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2759 2764 == 0
2760 2765 ):
2761 2766 ret = 0
2762 2767 except error.LookupError:
2763 2768 ui.status(
2764 2769 _(b"skipping missing subrepository: %s\n")
2765 2770 % uipathfn(subpath)
2766 2771 )
2767 2772
2768 2773 return ret
2769 2774
2770 2775
2771 2776 def remove(
2772 2777 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2773 2778 ):
2774 2779 ret = 0
2775 2780 s = repo.status(match=m, clean=True)
2776 2781 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2777 2782
2778 2783 wctx = repo[None]
2779 2784
2780 2785 if warnings is None:
2781 2786 warnings = []
2782 2787 warn = True
2783 2788 else:
2784 2789 warn = False
2785 2790
2786 2791 subs = sorted(wctx.substate)
2787 2792 progress = ui.makeprogress(
2788 2793 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2789 2794 )
2790 2795 for subpath in subs:
2791 2796 submatch = matchmod.subdirmatcher(subpath, m)
2792 2797 subprefix = repo.wvfs.reljoin(prefix, subpath)
2793 2798 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2794 2799 if subrepos or m.exact(subpath) or any(submatch.files()):
2795 2800 progress.increment()
2796 2801 sub = wctx.sub(subpath)
2797 2802 try:
2798 2803 if sub.removefiles(
2799 2804 submatch,
2800 2805 subprefix,
2801 2806 subuipathfn,
2802 2807 after,
2803 2808 force,
2804 2809 subrepos,
2805 2810 dryrun,
2806 2811 warnings,
2807 2812 ):
2808 2813 ret = 1
2809 2814 except error.LookupError:
2810 2815 warnings.append(
2811 2816 _(b"skipping missing subrepository: %s\n")
2812 2817 % uipathfn(subpath)
2813 2818 )
2814 2819 progress.complete()
2815 2820
2816 2821 # warn about failure to delete explicit files/dirs
2817 2822 deleteddirs = pathutil.dirs(deleted)
2818 2823 files = m.files()
2819 2824 progress = ui.makeprogress(
2820 2825 _(b'deleting'), total=len(files), unit=_(b'files')
2821 2826 )
2822 2827 for f in files:
2823 2828
2824 2829 def insubrepo():
2825 2830 for subpath in wctx.substate:
2826 2831 if f.startswith(subpath + b'/'):
2827 2832 return True
2828 2833 return False
2829 2834
2830 2835 progress.increment()
2831 2836 isdir = f in deleteddirs or wctx.hasdir(f)
2832 2837 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2833 2838 continue
2834 2839
2835 2840 if repo.wvfs.exists(f):
2836 2841 if repo.wvfs.isdir(f):
2837 2842 warnings.append(
2838 2843 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2839 2844 )
2840 2845 else:
2841 2846 warnings.append(
2842 2847 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2843 2848 )
2844 2849 # missing files will generate a warning elsewhere
2845 2850 ret = 1
2846 2851 progress.complete()
2847 2852
2848 2853 if force:
2849 2854 list = modified + deleted + clean + added
2850 2855 elif after:
2851 2856 list = deleted
2852 2857 remaining = modified + added + clean
2853 2858 progress = ui.makeprogress(
2854 2859 _(b'skipping'), total=len(remaining), unit=_(b'files')
2855 2860 )
2856 2861 for f in remaining:
2857 2862 progress.increment()
2858 2863 if ui.verbose or (f in files):
2859 2864 warnings.append(
2860 2865 _(b'not removing %s: file still exists\n') % uipathfn(f)
2861 2866 )
2862 2867 ret = 1
2863 2868 progress.complete()
2864 2869 else:
2865 2870 list = deleted + clean
2866 2871 progress = ui.makeprogress(
2867 2872 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2868 2873 )
2869 2874 for f in modified:
2870 2875 progress.increment()
2871 2876 warnings.append(
2872 2877 _(
2873 2878 b'not removing %s: file is modified (use -f'
2874 2879 b' to force removal)\n'
2875 2880 )
2876 2881 % uipathfn(f)
2877 2882 )
2878 2883 ret = 1
2879 2884 for f in added:
2880 2885 progress.increment()
2881 2886 warnings.append(
2882 2887 _(
2883 2888 b"not removing %s: file has been marked for add"
2884 2889 b" (use 'hg forget' to undo add)\n"
2885 2890 )
2886 2891 % uipathfn(f)
2887 2892 )
2888 2893 ret = 1
2889 2894 progress.complete()
2890 2895
2891 2896 list = sorted(list)
2892 2897 progress = ui.makeprogress(
2893 2898 _(b'deleting'), total=len(list), unit=_(b'files')
2894 2899 )
2895 2900 for f in list:
2896 2901 if ui.verbose or not m.exact(f):
2897 2902 progress.increment()
2898 2903 ui.status(
2899 2904 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2900 2905 )
2901 2906 progress.complete()
2902 2907
2903 2908 if not dryrun:
2904 2909 with repo.wlock():
2905 2910 if not after:
2906 2911 for f in list:
2907 2912 if f in added:
2908 2913 continue # we never unlink added files on remove
2909 2914 rmdir = repo.ui.configbool(
2910 2915 b'experimental', b'removeemptydirs'
2911 2916 )
2912 2917 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2913 2918 repo[None].forget(list)
2914 2919
2915 2920 if warn:
2916 2921 for warning in warnings:
2917 2922 ui.warn(warning)
2918 2923
2919 2924 return ret
2920 2925
2921 2926
2922 2927 def _catfmtneedsdata(fm):
2923 2928 return not fm.datahint() or b'data' in fm.datahint()
2924 2929
2925 2930
2926 2931 def _updatecatformatter(fm, ctx, matcher, path, decode):
2927 2932 """Hook for adding data to the formatter used by ``hg cat``.
2928 2933
2929 2934 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2930 2935 this method first."""
2931 2936
2932 2937 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2933 2938 # wasn't requested.
2934 2939 data = b''
2935 2940 if _catfmtneedsdata(fm):
2936 2941 data = ctx[path].data()
2937 2942 if decode:
2938 2943 data = ctx.repo().wwritedata(path, data)
2939 2944 fm.startitem()
2940 2945 fm.context(ctx=ctx)
2941 2946 fm.write(b'data', b'%s', data)
2942 2947 fm.data(path=path)
2943 2948
2944 2949
2945 2950 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2946 2951 err = 1
2947 2952 opts = pycompat.byteskwargs(opts)
2948 2953
2949 2954 def write(path):
2950 2955 filename = None
2951 2956 if fntemplate:
2952 2957 filename = makefilename(
2953 2958 ctx, fntemplate, pathname=os.path.join(prefix, path)
2954 2959 )
2955 2960 # attempt to create the directory if it does not already exist
2956 2961 try:
2957 2962 os.makedirs(os.path.dirname(filename))
2958 2963 except OSError:
2959 2964 pass
2960 2965 with formatter.maybereopen(basefm, filename) as fm:
2961 2966 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2962 2967
2963 2968 # Automation often uses hg cat on single files, so special case it
2964 2969 # for performance to avoid the cost of parsing the manifest.
2965 2970 if len(matcher.files()) == 1 and not matcher.anypats():
2966 2971 file = matcher.files()[0]
2967 2972 mfl = repo.manifestlog
2968 2973 mfnode = ctx.manifestnode()
2969 2974 try:
2970 2975 if mfnode and mfl[mfnode].find(file)[0]:
2971 2976 if _catfmtneedsdata(basefm):
2972 2977 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2973 2978 write(file)
2974 2979 return 0
2975 2980 except KeyError:
2976 2981 pass
2977 2982
2978 2983 if _catfmtneedsdata(basefm):
2979 2984 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2980 2985
2981 2986 for abs in ctx.walk(matcher):
2982 2987 write(abs)
2983 2988 err = 0
2984 2989
2985 2990 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2986 2991 for subpath in sorted(ctx.substate):
2987 2992 sub = ctx.sub(subpath)
2988 2993 try:
2989 2994 submatch = matchmod.subdirmatcher(subpath, matcher)
2990 2995 subprefix = os.path.join(prefix, subpath)
2991 2996 if not sub.cat(
2992 2997 submatch,
2993 2998 basefm,
2994 2999 fntemplate,
2995 3000 subprefix,
2996 3001 **pycompat.strkwargs(opts)
2997 3002 ):
2998 3003 err = 0
2999 3004 except error.RepoLookupError:
3000 3005 ui.status(
3001 3006 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3002 3007 )
3003 3008
3004 3009 return err
3005 3010
3006 3011
3007 3012 def commit(ui, repo, commitfunc, pats, opts):
3008 3013 '''commit the specified files or all outstanding changes'''
3009 3014 date = opts.get(b'date')
3010 3015 if date:
3011 3016 opts[b'date'] = dateutil.parsedate(date)
3012 3017 message = logmessage(ui, opts)
3013 3018 matcher = scmutil.match(repo[None], pats, opts)
3014 3019
3015 3020 dsguard = None
3016 3021 # extract addremove carefully -- this function can be called from a command
3017 3022 # that doesn't support addremove
3018 3023 if opts.get(b'addremove'):
3019 3024 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3020 3025 with dsguard or util.nullcontextmanager():
3021 3026 if dsguard:
3022 3027 relative = scmutil.anypats(pats, opts)
3023 3028 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3024 3029 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3025 3030 raise error.Abort(
3026 3031 _(b"failed to mark all new/missing files as added/removed")
3027 3032 )
3028 3033
3029 3034 return commitfunc(ui, repo, message, matcher, opts)
3030 3035
3031 3036
3032 3037 def samefile(f, ctx1, ctx2):
3033 3038 if f in ctx1.manifest():
3034 3039 a = ctx1.filectx(f)
3035 3040 if f in ctx2.manifest():
3036 3041 b = ctx2.filectx(f)
3037 3042 return not a.cmp(b) and a.flags() == b.flags()
3038 3043 else:
3039 3044 return False
3040 3045 else:
3041 3046 return f not in ctx2.manifest()
3042 3047
3043 3048
3044 3049 def amend(ui, repo, old, extra, pats, opts):
3045 3050 # avoid cycle context -> subrepo -> cmdutil
3046 3051 from . import context
3047 3052
3048 3053 # amend will reuse the existing user if not specified, but the obsolete
3049 3054 # marker creation requires that the current user's name is specified.
3050 3055 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3051 3056 ui.username() # raise exception if username not set
3052 3057
3053 3058 ui.note(_(b'amending changeset %s\n') % old)
3054 3059 base = old.p1()
3055 3060
3056 3061 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3057 3062 # Participating changesets:
3058 3063 #
3059 3064 # wctx o - workingctx that contains changes from working copy
3060 3065 # | to go into amending commit
3061 3066 # |
3062 3067 # old o - changeset to amend
3063 3068 # |
3064 3069 # base o - first parent of the changeset to amend
3065 3070 wctx = repo[None]
3066 3071
3067 3072 # Copy to avoid mutating input
3068 3073 extra = extra.copy()
3069 3074 # Update extra dict from amended commit (e.g. to preserve graft
3070 3075 # source)
3071 3076 extra.update(old.extra())
3072 3077
3073 3078 # Also update it from the wctx
3074 3079 extra.update(wctx.extra())
3075 3080
3076 3081 # date-only change should be ignored?
3077 3082 datemaydiffer = resolvecommitoptions(ui, opts)
3078 3083
3079 3084 date = old.date()
3080 3085 if opts.get(b'date'):
3081 3086 date = dateutil.parsedate(opts.get(b'date'))
3082 3087 user = opts.get(b'user') or old.user()
3083 3088
3084 3089 if len(old.parents()) > 1:
3085 3090 # ctx.files() isn't reliable for merges, so fall back to the
3086 3091 # slower repo.status() method
3087 3092 st = base.status(old)
3088 3093 files = set(st.modified) | set(st.added) | set(st.removed)
3089 3094 else:
3090 3095 files = set(old.files())
3091 3096
3092 3097 # add/remove the files to the working copy if the "addremove" option
3093 3098 # was specified.
3094 3099 matcher = scmutil.match(wctx, pats, opts)
3095 3100 relative = scmutil.anypats(pats, opts)
3096 3101 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3097 3102 if opts.get(b'addremove') and scmutil.addremove(
3098 3103 repo, matcher, b"", uipathfn, opts
3099 3104 ):
3100 3105 raise error.Abort(
3101 3106 _(b"failed to mark all new/missing files as added/removed")
3102 3107 )
3103 3108
3104 3109 # Check subrepos. This depends on in-place wctx._status update in
3105 3110 # subrepo.precommit(). To minimize the risk of this hack, we do
3106 3111 # nothing if .hgsub does not exist.
3107 3112 if b'.hgsub' in wctx or b'.hgsub' in old:
3108 3113 subs, commitsubs, newsubstate = subrepoutil.precommit(
3109 3114 ui, wctx, wctx._status, matcher
3110 3115 )
3111 3116 # amend should abort if commitsubrepos is enabled
3112 3117 assert not commitsubs
3113 3118 if subs:
3114 3119 subrepoutil.writestate(repo, newsubstate)
3115 3120
3116 3121 ms = mergemod.mergestate.read(repo)
3117 3122 mergeutil.checkunresolved(ms)
3118 3123
3119 3124 filestoamend = set(f for f in wctx.files() if matcher(f))
3120 3125
3121 3126 changes = len(filestoamend) > 0
3122 3127 if changes:
3123 3128 # Recompute copies (avoid recording a -> b -> a)
3124 3129 copied = copies.pathcopies(base, wctx, matcher)
3125 3130 if old.p2():
3126 3131 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3127 3132
3128 3133 # Prune files which were reverted by the updates: if old
3129 3134 # introduced file X and the file was renamed in the working
3130 3135 # copy, then those two files are the same and
3131 3136 # we can discard X from our list of files. Likewise if X
3132 3137 # was removed, it's no longer relevant. If X is missing (aka
3133 3138 # deleted), old X must be preserved.
3134 3139 files.update(filestoamend)
3135 3140 files = [
3136 3141 f
3137 3142 for f in files
3138 3143 if (f not in filestoamend or not samefile(f, wctx, base))
3139 3144 ]
3140 3145
3141 3146 def filectxfn(repo, ctx_, path):
3142 3147 try:
3143 3148 # If the file being considered is not amongst the files
3144 3149 # to be amended, we should return the file context from the
3145 3150 # old changeset. This avoids issues when only some files in
3146 3151 # the working copy are being amended but there are also
3147 3152 # changes to other files from the old changeset.
3148 3153 if path not in filestoamend:
3149 3154 return old.filectx(path)
3150 3155
3151 3156 # Return None for removed files.
3152 3157 if path in wctx.removed():
3153 3158 return None
3154 3159
3155 3160 fctx = wctx[path]
3156 3161 flags = fctx.flags()
3157 3162 mctx = context.memfilectx(
3158 3163 repo,
3159 3164 ctx_,
3160 3165 fctx.path(),
3161 3166 fctx.data(),
3162 3167 islink=b'l' in flags,
3163 3168 isexec=b'x' in flags,
3164 3169 copysource=copied.get(path),
3165 3170 )
3166 3171 return mctx
3167 3172 except KeyError:
3168 3173 return None
3169 3174
3170 3175 else:
3171 3176 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3172 3177
3173 3178 # Use version of files as in the old cset
3174 3179 def filectxfn(repo, ctx_, path):
3175 3180 try:
3176 3181 return old.filectx(path)
3177 3182 except KeyError:
3178 3183 return None
3179 3184
3180 3185 # See if we got a message from -m or -l, if not, open the editor with
3181 3186 # the message of the changeset to amend.
3182 3187 message = logmessage(ui, opts)
3183 3188
3184 3189 editform = mergeeditform(old, b'commit.amend')
3185 3190
3186 3191 if not message:
3187 3192 message = old.description()
3188 3193 # Default if message isn't provided and --edit is not passed is to
3189 3194 # invoke editor, but allow --no-edit. If somehow we don't have any
3190 3195 # description, let's always start the editor.
3191 3196 doedit = not message or opts.get(b'edit') in [True, None]
3192 3197 else:
3193 3198 # Default if message is provided is to not invoke editor, but allow
3194 3199 # --edit.
3195 3200 doedit = opts.get(b'edit') is True
3196 3201 editor = getcommiteditor(edit=doedit, editform=editform)
3197 3202
3198 3203 pureextra = extra.copy()
3199 3204 extra[b'amend_source'] = old.hex()
3200 3205
3201 3206 new = context.memctx(
3202 3207 repo,
3203 3208 parents=[base.node(), old.p2().node()],
3204 3209 text=message,
3205 3210 files=files,
3206 3211 filectxfn=filectxfn,
3207 3212 user=user,
3208 3213 date=date,
3209 3214 extra=extra,
3210 3215 editor=editor,
3211 3216 )
3212 3217
3213 3218 newdesc = changelog.stripdesc(new.description())
3214 3219 if (
3215 3220 (not changes)
3216 3221 and newdesc == old.description()
3217 3222 and user == old.user()
3218 3223 and (date == old.date() or datemaydiffer)
3219 3224 and pureextra == old.extra()
3220 3225 ):
3221 3226 # nothing changed. continuing here would create a new node
3222 3227 # anyway because of the amend_source noise.
3223 3228 #
3224 3229 # This is not what we expect from amend.
3225 3230 return old.node()
3226 3231
3227 3232 commitphase = None
3228 3233 if opts.get(b'secret'):
3229 3234 commitphase = phases.secret
3230 3235 newid = repo.commitctx(new)
3231 3236
3232 3237 # Reroute the working copy parent to the new changeset
3233 3238 repo.setparents(newid, nullid)
3234 3239 mapping = {old.node(): (newid,)}
3235 3240 obsmetadata = None
3236 3241 if opts.get(b'note'):
3237 3242 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3238 3243 backup = ui.configbool(b'rewrite', b'backup-bundle')
3239 3244 scmutil.cleanupnodes(
3240 3245 repo,
3241 3246 mapping,
3242 3247 b'amend',
3243 3248 metadata=obsmetadata,
3244 3249 fixphase=True,
3245 3250 targetphase=commitphase,
3246 3251 backup=backup,
3247 3252 )
3248 3253
3249 3254 # Fixing the dirstate because localrepo.commitctx does not update
3250 3255 # it. This is rather convenient because we did not need to update
3251 3256 # the dirstate for all the files in the new commit which commitctx
3252 3257 # could have done if it updated the dirstate. Now, we can
3253 3258 # selectively update the dirstate only for the amended files.
3254 3259 dirstate = repo.dirstate
3255 3260
3256 3261 # Update the state of the files which were added and modified in the
3257 3262 # amend to "normal" in the dirstate. We need to use "normallookup" since
3258 3263 # the files may have changed since the command started; using "normal"
3259 3264 # would mark them as clean but with uncommitted contents.
3260 3265 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3261 3266 for f in normalfiles:
3262 3267 dirstate.normallookup(f)
3263 3268
3264 3269 # Update the state of files which were removed in the amend
3265 3270 # to "removed" in the dirstate.
3266 3271 removedfiles = set(wctx.removed()) & filestoamend
3267 3272 for f in removedfiles:
3268 3273 dirstate.drop(f)
3269 3274
3270 3275 return newid
3271 3276
3272 3277
3273 3278 def commiteditor(repo, ctx, subs, editform=b''):
3274 3279 if ctx.description():
3275 3280 return ctx.description()
3276 3281 return commitforceeditor(
3277 3282 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3278 3283 )
3279 3284
3280 3285
3281 3286 def commitforceeditor(
3282 3287 repo,
3283 3288 ctx,
3284 3289 subs,
3285 3290 finishdesc=None,
3286 3291 extramsg=None,
3287 3292 editform=b'',
3288 3293 unchangedmessagedetection=False,
3289 3294 ):
3290 3295 if not extramsg:
3291 3296 extramsg = _(b"Leave message empty to abort commit.")
3292 3297
3293 3298 forms = [e for e in editform.split(b'.') if e]
3294 3299 forms.insert(0, b'changeset')
3295 3300 templatetext = None
3296 3301 while forms:
3297 3302 ref = b'.'.join(forms)
3298 3303 if repo.ui.config(b'committemplate', ref):
3299 3304 templatetext = committext = buildcommittemplate(
3300 3305 repo, ctx, subs, extramsg, ref
3301 3306 )
3302 3307 break
3303 3308 forms.pop()
3304 3309 else:
3305 3310 committext = buildcommittext(repo, ctx, subs, extramsg)
3306 3311
3307 3312 # run editor in the repository root
3308 3313 olddir = encoding.getcwd()
3309 3314 os.chdir(repo.root)
3310 3315
3311 3316 # make in-memory changes visible to external process
3312 3317 tr = repo.currenttransaction()
3313 3318 repo.dirstate.write(tr)
3314 3319 pending = tr and tr.writepending() and repo.root
3315 3320
3316 3321 editortext = repo.ui.edit(
3317 3322 committext,
3318 3323 ctx.user(),
3319 3324 ctx.extra(),
3320 3325 editform=editform,
3321 3326 pending=pending,
3322 3327 repopath=repo.path,
3323 3328 action=b'commit',
3324 3329 )
3325 3330 text = editortext
3326 3331
3327 3332 # strip away anything below this special string (used for editors that want
3328 3333 # to display the diff)
3329 3334 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3330 3335 if stripbelow:
3331 3336 text = text[: stripbelow.start()]
3332 3337
3333 3338 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3334 3339 os.chdir(olddir)
3335 3340
3336 3341 if finishdesc:
3337 3342 text = finishdesc(text)
3338 3343 if not text.strip():
3339 3344 raise error.Abort(_(b"empty commit message"))
3340 3345 if unchangedmessagedetection and editortext == templatetext:
3341 3346 raise error.Abort(_(b"commit message unchanged"))
3342 3347
3343 3348 return text
3344 3349
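The template-reference narrowing at the top of commitforceeditor() is worth a worked example: for an editform such as 'commit.amend.normal', the loop consults increasingly generic [committemplate] keys and stops at the first one that is configured. Reproducing the lookup order outside the function (purely illustrative):

    forms = [b'changeset'] + b'commit.amend.normal'.split(b'.')
    lookups = []
    while forms:
        lookups.append(b'.'.join(forms))
        forms.pop()
    # lookups == [b'changeset.commit.amend.normal',
    #             b'changeset.commit.amend',
    #             b'changeset.commit',
    #             b'changeset']

Each candidate is then looked up as repo.ui.config(b'committemplate', ref), in that order.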
3345 3350
3346 3351 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3347 3352 ui = repo.ui
3348 3353 spec = formatter.templatespec(ref, None, None)
3349 3354 t = logcmdutil.changesettemplater(ui, repo, spec)
3350 3355 t.t.cache.update(
3351 3356 (k, templater.unquotestring(v))
3352 3357 for k, v in repo.ui.configitems(b'committemplate')
3353 3358 )
3354 3359
3355 3360 if not extramsg:
3356 3361 extramsg = b'' # ensure that extramsg is string
3357 3362
3358 3363 ui.pushbuffer()
3359 3364 t.show(ctx, extramsg=extramsg)
3360 3365 return ui.popbuffer()
3361 3366
3362 3367
3363 3368 def hgprefix(msg):
3364 3369 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3365 3370
3366 3371
3367 3372 def buildcommittext(repo, ctx, subs, extramsg):
3368 3373 edittext = []
3369 3374 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3370 3375 if ctx.description():
3371 3376 edittext.append(ctx.description())
3372 3377 edittext.append(b"")
3373 3378 edittext.append(b"") # Empty line between message and comments.
3374 3379 edittext.append(
3375 3380 hgprefix(
3376 3381 _(
3377 3382 b"Enter commit message."
3378 3383 b" Lines beginning with 'HG:' are removed."
3379 3384 )
3380 3385 )
3381 3386 )
3382 3387 edittext.append(hgprefix(extramsg))
3383 3388 edittext.append(b"HG: --")
3384 3389 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3385 3390 if ctx.p2():
3386 3391 edittext.append(hgprefix(_(b"branch merge")))
3387 3392 if ctx.branch():
3388 3393 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3389 3394 if bookmarks.isactivewdirparent(repo):
3390 3395 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3391 3396 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3392 3397 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3393 3398 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3394 3399 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3395 3400 if not added and not modified and not removed:
3396 3401 edittext.append(hgprefix(_(b"no files changed")))
3397 3402 edittext.append(b"")
3398 3403
3399 3404 return b"\n".join(edittext)
3400 3405
3401 3406
3402 3407 def commitstatus(repo, node, branch, bheads=None, opts=None):
3403 3408 if opts is None:
3404 3409 opts = {}
3405 3410 ctx = repo[node]
3406 3411 parents = ctx.parents()
3407 3412
3408 3413 if (
3409 3414 not opts.get(b'amend')
3410 3415 and bheads
3411 3416 and node not in bheads
3412 3417 and not [
3413 3418 x for x in parents if x.node() in bheads and x.branch() == branch
3414 3419 ]
3415 3420 ):
3416 3421 repo.ui.status(_(b'created new head\n'))
3417 3422 # The message is not printed for initial roots. For the other
3418 3423 # changesets, it is printed in the following situations:
3419 3424 #
3420 3425 # Par column: for the 2 parents with ...
3421 3426 # N: null or no parent
3422 3427 # B: parent is on another named branch
3423 3428 # C: parent is a regular non head changeset
3424 3429 # H: parent was a branch head of the current branch
3425 3430 # Msg column: whether we print "created new head" message
3426 3431 # In the following, it is assumed that there already exists some
3427 3432 # initial branch heads of the current branch, otherwise nothing is
3428 3433 # printed anyway.
3429 3434 #
3430 3435 # Par Msg Comment
3431 3436 # N N y additional topo root
3432 3437 #
3433 3438 # B N y additional branch root
3434 3439 # C N y additional topo head
3435 3440 # H N n usual case
3436 3441 #
3437 3442 # B B y weird additional branch root
3438 3443 # C B y branch merge
3439 3444 # H B n merge with named branch
3440 3445 #
3441 3446 # C C y additional head from merge
3442 3447 # C H n merge with a head
3443 3448 #
3444 3449 # H H n head merge: head count decreases
3445 3450
3446 3451 if not opts.get(b'close_branch'):
3447 3452 for r in parents:
3448 3453 if r.closesbranch() and r.branch() == branch:
3449 3454 repo.ui.status(
3450 3455 _(b'reopening closed branch head %d\n') % r.rev()
3451 3456 )
3452 3457
3453 3458 if repo.ui.debugflag:
3454 3459 repo.ui.write(
3455 3460 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3456 3461 )
3457 3462 elif repo.ui.verbose:
3458 3463 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3459 3464
3460 3465
3461 3466 def postcommitstatus(repo, pats, opts):
3462 3467 return repo.status(match=scmutil.match(repo[None], pats, opts))
3463 3468
3464 3469
3465 3470 def revert(ui, repo, ctx, parents, *pats, **opts):
3466 3471 opts = pycompat.byteskwargs(opts)
3467 3472 parent, p2 = parents
3468 3473 node = ctx.node()
3469 3474
3470 3475 mf = ctx.manifest()
3471 3476 if node == p2:
3472 3477 parent = p2
3473 3478
3474 3479 # need all matching names in dirstate and manifest of target rev,
3475 3480 # so have to walk both. do not print errors if files exist in one
3476 3481 # but not other. in both cases, filesets should be evaluated against
3477 3482 # workingctx to get consistent result (issue4497). this means 'set:**'
3478 3483 # cannot be used to select missing files from target rev.
3479 3484
3480 3485 # `names` is a mapping for all elements in working copy and target revision
3481 3486 # The mapping is in the form:
3482 3487 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3483 3488 names = {}
3484 3489 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3485 3490
3486 3491 with repo.wlock():
3487 3492 ## filling of the `names` mapping
3488 3493 # walk dirstate to fill `names`
3489 3494
3490 3495 interactive = opts.get(b'interactive', False)
3491 3496 wctx = repo[None]
3492 3497 m = scmutil.match(wctx, pats, opts)
3493 3498
3494 3499 # we'll need this later
3495 3500 targetsubs = sorted(s for s in wctx.substate if m(s))
3496 3501
3497 3502 if not m.always():
3498 3503 matcher = matchmod.badmatch(m, lambda x, y: False)
3499 3504 for abs in wctx.walk(matcher):
3500 3505 names[abs] = m.exact(abs)
3501 3506
3502 3507 # walk target manifest to fill `names`
3503 3508
3504 3509 def badfn(path, msg):
3505 3510 if path in names:
3506 3511 return
3507 3512 if path in ctx.substate:
3508 3513 return
3509 3514 path_ = path + b'/'
3510 3515 for f in names:
3511 3516 if f.startswith(path_):
3512 3517 return
3513 3518 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3514 3519
3515 3520 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3516 3521 if abs not in names:
3517 3522 names[abs] = m.exact(abs)
3518 3523
3519 3524 # Find the status of all files in `names`.
3520 3525 m = scmutil.matchfiles(repo, names)
3521 3526
3522 3527 changes = repo.status(
3523 3528 node1=node, match=m, unknown=True, ignored=True, clean=True
3524 3529 )
3525 3530 else:
3526 3531 changes = repo.status(node1=node, match=m)
3527 3532 for kind in changes:
3528 3533 for abs in kind:
3529 3534 names[abs] = m.exact(abs)
3530 3535
3531 3536 m = scmutil.matchfiles(repo, names)
3532 3537
3533 3538 modified = set(changes.modified)
3534 3539 added = set(changes.added)
3535 3540 removed = set(changes.removed)
3536 3541 _deleted = set(changes.deleted)
3537 3542 unknown = set(changes.unknown)
3538 3543 unknown.update(changes.ignored)
3539 3544 clean = set(changes.clean)
3540 3545 modadded = set()
3541 3546
3542 3547 # We need to account for the state of the file in the dirstate,
3543 3548 # even when we revert against something other than the parent. This will
3544 3549 # slightly alter the behavior of revert (doing a backup or not, delete
3545 3550 # or just forget, etc.).
3546 3551 if parent == node:
3547 3552 dsmodified = modified
3548 3553 dsadded = added
3549 3554 dsremoved = removed
3550 3555 # store all local modifications, useful later for rename detection
3551 3556 localchanges = dsmodified | dsadded
3552 3557 modified, added, removed = set(), set(), set()
3553 3558 else:
3554 3559 changes = repo.status(node1=parent, match=m)
3555 3560 dsmodified = set(changes.modified)
3556 3561 dsadded = set(changes.added)
3557 3562 dsremoved = set(changes.removed)
3558 3563 # store all local modifications, useful later for rename detection
3559 3564 localchanges = dsmodified | dsadded
3560 3565
3561 3566 # only take into account removes between wc and target
3562 3567 clean |= dsremoved - removed
3563 3568 dsremoved &= removed
3564 3569 # distinguish between dirstate removes and other removes
3565 3570 removed -= dsremoved
3566 3571
3567 3572 modadded = added & dsmodified
3568 3573 added -= modadded
3569 3574
3570 3575 # tell newly modified files apart.
3571 3576 dsmodified &= modified
3572 3577 dsmodified |= modified & dsadded # dirstate added may need backup
3573 3578 modified -= dsmodified
3574 3579
3575 3580 # We need to wait for some post-processing to update this set
3576 3581 # before making the distinction. The dirstate will be used for
3577 3582 # that purpose.
3578 3583 dsadded = added
3579 3584
3580 3585 # in case of merge, files that are actually added can be reported as
3581 3586 # modified; we need to post-process the result
3582 3587 if p2 != nullid:
3583 3588 mergeadd = set(dsmodified)
3584 3589 for path in dsmodified:
3585 3590 if path in mf:
3586 3591 mergeadd.remove(path)
3587 3592 dsadded |= mergeadd
3588 3593 dsmodified -= mergeadd
3589 3594
3590 3595 # if f is a rename, update `names` to also revert the source
3591 3596 for f in localchanges:
3592 3597 src = repo.dirstate.copied(f)
3593 3598 # XXX should we check for rename down to target node?
3594 3599 if src and src not in names and repo.dirstate[src] == b'r':
3595 3600 dsremoved.add(src)
3596 3601 names[src] = True
3597 3602
3598 3603 # determine the exact nature of the deleted files
3599 3604 deladded = set(_deleted)
3600 3605 for path in _deleted:
3601 3606 if path in mf:
3602 3607 deladded.remove(path)
3603 3608 deleted = _deleted - deladded
3604 3609
3605 3610 # distinguish between files to forget and the others
3606 3611 added = set()
3607 3612 for abs in dsadded:
3608 3613 if repo.dirstate[abs] != b'a':
3609 3614 added.add(abs)
3610 3615 dsadded -= added
3611 3616
3612 3617 for abs in deladded:
3613 3618 if repo.dirstate[abs] == b'a':
3614 3619 dsadded.add(abs)
3615 3620 deladded -= dsadded
3616 3621
3617 3622 # For files marked as removed, we check if an unknown file is present at
3618 3623 # the same path. If such a file exists it may need to be backed up.
3619 3624 # Making the distinction at this stage helps keep the backup
3620 3625 # logic simpler.
3621 3626 removunk = set()
3622 3627 for abs in removed:
3623 3628 target = repo.wjoin(abs)
3624 3629 if os.path.lexists(target):
3625 3630 removunk.add(abs)
3626 3631 removed -= removunk
3627 3632
3628 3633 dsremovunk = set()
3629 3634 for abs in dsremoved:
3630 3635 target = repo.wjoin(abs)
3631 3636 if os.path.lexists(target):
3632 3637 dsremovunk.add(abs)
3633 3638 dsremoved -= dsremovunk
3634 3639
3635 3640 # action to be actually performed by revert
3636 3641 # (<list of files>, <message>) tuple
3637 3642 actions = {
3638 3643 b'revert': ([], _(b'reverting %s\n')),
3639 3644 b'add': ([], _(b'adding %s\n')),
3640 3645 b'remove': ([], _(b'removing %s\n')),
3641 3646 b'drop': ([], _(b'removing %s\n')),
3642 3647 b'forget': ([], _(b'forgetting %s\n')),
3643 3648 b'undelete': ([], _(b'undeleting %s\n')),
3644 3649 b'noop': (None, _(b'no changes needed to %s\n')),
3645 3650 b'unknown': (None, _(b'file not managed: %s\n')),
3646 3651 }
3647 3652
3648 3653 # "constants" that convey the backup strategy.
3649 3654 # All are set to `discard` if `no-backup` is set, to avoid checking
3650 3655 # no_backup lower in the code.
3651 3656 # These values are ordered for comparison purposes
3652 3657 backupinteractive = 3 # do backup if interactively modified
3653 3658 backup = 2 # unconditionally do backup
3654 3659 check = 1 # check if the existing file differs from target
3655 3660 discard = 0 # never do backup
3656 3661 if opts.get(b'no_backup'):
3657 3662 backupinteractive = backup = check = discard
3658 3663 if interactive:
3659 3664 dsmodifiedbackup = backupinteractive
3660 3665 else:
3661 3666 dsmodifiedbackup = backup
3662 3667 tobackup = set()
3663 3668
3664 3669 backupanddel = actions[b'remove']
3665 3670 if not opts.get(b'no_backup'):
3666 3671 backupanddel = actions[b'drop']
3667 3672
3668 3673 disptable = (
3669 3674 # dispatch table:
3670 3675 # file state
3671 3676 # action
3672 3677 # make backup
3673 3678 ## Sets that result in changes to files on disk
3674 3679 # Modified compared to target, no local change
3675 3680 (modified, actions[b'revert'], discard),
3676 3681 # Modified compared to target, but local file is deleted
3677 3682 (deleted, actions[b'revert'], discard),
3678 3683 # Modified compared to target, local change
3679 3684 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3680 3685 # Added since target
3681 3686 (added, actions[b'remove'], discard),
3682 3687 # Added in working directory
3683 3688 (dsadded, actions[b'forget'], discard),
3684 3689 # Added since target, have local modification
3685 3690 (modadded, backupanddel, backup),
3686 3691 # Added since target but file is missing in working directory
3687 3692 (deladded, actions[b'drop'], discard),
3688 3693 # Removed since target, before working copy parent
3689 3694 (removed, actions[b'add'], discard),
3690 3695 # Same as `removed` but an unknown file exists at the same path
3691 3696 (removunk, actions[b'add'], check),
3692 3697 # Removed since target, marked as such in working copy parent
3693 3698 (dsremoved, actions[b'undelete'], discard),
3694 3699 # Same as `dsremoved` but an unknown file exists at the same path
3695 3700 (dsremovunk, actions[b'undelete'], check),
3696 3701 ## the following sets do not result in any file changes
3697 3702 # File with no modification
3698 3703 (clean, actions[b'noop'], discard),
3699 3704 # Existing file, not tracked anywhere
3700 3705 (unknown, actions[b'unknown'], discard),
3701 3706 )
3702 3707
3703 3708 for abs, exact in sorted(names.items()):
3704 3709 # target file to be touched on disk (relative to cwd)
3705 3710 target = repo.wjoin(abs)
3706 3711 # search the entry in the dispatch table.
3707 3712 # if the file is in any of these sets, it was touched in the working
3708 3713 # directory parent and we are sure it needs to be reverted.
3709 3714 for table, (xlist, msg), dobackup in disptable:
3710 3715 if abs not in table:
3711 3716 continue
3712 3717 if xlist is not None:
3713 3718 xlist.append(abs)
3714 3719 if dobackup:
3715 3720 # If in interactive mode, don't automatically create
3716 3721 # .orig files (issue4793)
3717 3722 if dobackup == backupinteractive:
3718 3723 tobackup.add(abs)
3719 3724 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3720 3725 absbakname = scmutil.backuppath(ui, repo, abs)
3721 3726 bakname = os.path.relpath(
3722 3727 absbakname, start=repo.root
3723 3728 )
3724 3729 ui.note(
3725 3730 _(b'saving current version of %s as %s\n')
3726 3731 % (uipathfn(abs), uipathfn(bakname))
3727 3732 )
3728 3733 if not opts.get(b'dry_run'):
3729 3734 if interactive:
3730 3735 util.copyfile(target, absbakname)
3731 3736 else:
3732 3737 util.rename(target, absbakname)
3733 3738 if opts.get(b'dry_run'):
3734 3739 if ui.verbose or not exact:
3735 3740 ui.status(msg % uipathfn(abs))
3736 3741 elif exact:
3737 3742 ui.warn(msg % uipathfn(abs))
3738 3743 break
3739 3744
3740 3745 if not opts.get(b'dry_run'):
3741 3746 needdata = (b'revert', b'add', b'undelete')
3742 3747 oplist = [actions[name][0] for name in needdata]
3743 3748 prefetch = scmutil.prefetchfiles
3744 3749 matchfiles = scmutil.matchfiles
3745 3750 prefetch(
3746 3751 repo,
3747 3752 [ctx.rev()],
3748 3753 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3749 3754 )
3750 3755 match = scmutil.match(repo[None], pats)
3751 3756 _performrevert(
3752 3757 repo,
3753 3758 parents,
3754 3759 ctx,
3755 3760 names,
3756 3761 uipathfn,
3757 3762 actions,
3758 3763 match,
3759 3764 interactive,
3760 3765 tobackup,
3761 3766 )
3762 3767
3763 3768 if targetsubs:
3764 3769 # Revert the subrepos on the revert list
3765 3770 for sub in targetsubs:
3766 3771 try:
3767 3772 wctx.sub(sub).revert(
3768 3773 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3769 3774 )
3770 3775 except KeyError:
3771 3776 raise error.Abort(
3772 3777 b"subrepository '%s' does not exist in %s!"
3773 3778 % (sub, short(ctx.node()))
3774 3779 )
3775 3780
3776 3781
3777 3782 def _performrevert(
3778 3783 repo,
3779 3784 parents,
3780 3785 ctx,
3781 3786 names,
3782 3787 uipathfn,
3783 3788 actions,
3784 3789 match,
3785 3790 interactive=False,
3786 3791 tobackup=None,
3787 3792 ):
3788 3793 """function that actually performs all the actions computed for revert
3789 3794
3790 3795 This is an independent function to let extensions plug in and react to
3791 3796 the imminent revert.
3792 3797
3793 3798 Make sure you have the working directory locked when calling this function.
3794 3799 """
3795 3800 parent, p2 = parents
3796 3801 node = ctx.node()
3797 3802 excluded_files = []
3798 3803
3799 3804 def checkout(f):
3800 3805 fc = ctx[f]
3801 3806 repo.wwrite(f, fc.data(), fc.flags())
3802 3807
3803 3808 def doremove(f):
3804 3809 try:
3805 3810 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3806 3811 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3807 3812 except OSError:
3808 3813 pass
3809 3814 repo.dirstate.remove(f)
3810 3815
3811 3816 def prntstatusmsg(action, f):
3812 3817 exact = names[f]
3813 3818 if repo.ui.verbose or not exact:
3814 3819 repo.ui.status(actions[action][1] % uipathfn(f))
3815 3820
3816 3821 audit_path = pathutil.pathauditor(repo.root, cached=True)
3817 3822 for f in actions[b'forget'][0]:
3818 3823 if interactive:
3819 3824 choice = repo.ui.promptchoice(
3820 3825 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3821 3826 )
3822 3827 if choice == 0:
3823 3828 prntstatusmsg(b'forget', f)
3824 3829 repo.dirstate.drop(f)
3825 3830 else:
3826 3831 excluded_files.append(f)
3827 3832 else:
3828 3833 prntstatusmsg(b'forget', f)
3829 3834 repo.dirstate.drop(f)
3830 3835 for f in actions[b'remove'][0]:
3831 3836 audit_path(f)
3832 3837 if interactive:
3833 3838 choice = repo.ui.promptchoice(
3834 3839 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3835 3840 )
3836 3841 if choice == 0:
3837 3842 prntstatusmsg(b'remove', f)
3838 3843 doremove(f)
3839 3844 else:
3840 3845 excluded_files.append(f)
3841 3846 else:
3842 3847 prntstatusmsg(b'remove', f)
3843 3848 doremove(f)
3844 3849 for f in actions[b'drop'][0]:
3845 3850 audit_path(f)
3846 3851 prntstatusmsg(b'drop', f)
3847 3852 repo.dirstate.remove(f)
3848 3853
3849 3854 normal = None
3850 3855 if node == parent:
3851 3856 # We're reverting to our parent. If possible, we'd like status
3852 3857 # to report the file as clean. We have to use normallookup for
3853 3858 # merges to avoid losing information about merged/dirty files.
3854 3859 if p2 != nullid:
3855 3860 normal = repo.dirstate.normallookup
3856 3861 else:
3857 3862 normal = repo.dirstate.normal
3858 3863
3859 3864 newlyaddedandmodifiedfiles = set()
3860 3865 if interactive:
3861 3866 # Prompt the user for changes to revert
3862 3867 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3863 3868 m = scmutil.matchfiles(repo, torevert)
3864 3869 diffopts = patch.difffeatureopts(
3865 3870 repo.ui,
3866 3871 whitespace=True,
3867 3872 section=b'commands',
3868 3873 configprefix=b'revert.interactive.',
3869 3874 )
3870 3875 diffopts.nodates = True
3871 3876 diffopts.git = True
3872 3877 operation = b'apply'
3873 3878 if node == parent:
3874 3879 if repo.ui.configbool(
3875 3880 b'experimental', b'revert.interactive.select-to-keep'
3876 3881 ):
3877 3882 operation = b'keep'
3878 3883 else:
3879 3884 operation = b'discard'
3880 3885
3881 3886 if operation == b'apply':
3882 3887 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3883 3888 else:
3884 3889 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3885 3890 originalchunks = patch.parsepatch(diff)
3886 3891
3887 3892 try:
3888 3893
3889 3894 chunks, opts = recordfilter(
3890 3895 repo.ui, originalchunks, match, operation=operation
3891 3896 )
3892 3897 if operation == b'discard':
3893 3898 chunks = patch.reversehunks(chunks)
3894 3899
3895 3900 except error.PatchError as err:
3896 3901 raise error.Abort(_(b'error parsing patch: %s') % err)
3897 3902
3898 3903 # FIXME: when doing an interactive revert of a copy, there's no way of
3899 3904 # performing a partial revert of the added file, the only option is
3900 3905 # "remove added file <name> (Yn)?", so we don't need to worry about the
3901 3906 # alsorestore value. Ideally we'd be able to partially revert
3902 3907 # copied/renamed files.
3903 3908 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3904 3909 chunks, originalchunks
3905 3910 )
3906 3911 if tobackup is None:
3907 3912 tobackup = set()
3908 3913 # Apply changes
3909 3914 fp = stringio()
3910 3915 # chunks are serialized per file, but files aren't sorted
3911 3916 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3912 3917 prntstatusmsg(b'revert', f)
3913 3918 files = set()
3914 3919 for c in chunks:
3915 3920 if ishunk(c):
3916 3921 abs = c.header.filename()
3917 3922 # Create a backup file only if this hunk should be backed up
3918 3923 if c.header.filename() in tobackup:
3919 3924 target = repo.wjoin(abs)
3920 3925 bakname = scmutil.backuppath(repo.ui, repo, abs)
3921 3926 util.copyfile(target, bakname)
3922 3927 tobackup.remove(abs)
3923 3928 if abs not in files:
3924 3929 files.add(abs)
3925 3930 if operation == b'keep':
3926 3931 checkout(abs)
3927 3932 c.write(fp)
3928 3933 dopatch = fp.tell()
3929 3934 fp.seek(0)
3930 3935 if dopatch:
3931 3936 try:
3932 3937 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3933 3938 except error.PatchError as err:
3934 3939 raise error.Abort(pycompat.bytestr(err))
3935 3940 del fp
3936 3941 else:
3937 3942 for f in actions[b'revert'][0]:
3938 3943 prntstatusmsg(b'revert', f)
3939 3944 checkout(f)
3940 3945 if normal:
3941 3946 normal(f)
3942 3947
3943 3948 for f in actions[b'add'][0]:
3944 3949 # Don't check out modified files; they are already created by the diff
3945 3950 if f not in newlyaddedandmodifiedfiles:
3946 3951 prntstatusmsg(b'add', f)
3947 3952 checkout(f)
3948 3953 repo.dirstate.add(f)
3949 3954
3950 3955 normal = repo.dirstate.normallookup
3951 3956 if node == parent and p2 == nullid:
3952 3957 normal = repo.dirstate.normal
3953 3958 for f in actions[b'undelete'][0]:
3954 3959 if interactive:
3955 3960 choice = repo.ui.promptchoice(
3956 3961 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3957 3962 )
3958 3963 if choice == 0:
3959 3964 prntstatusmsg(b'undelete', f)
3960 3965 checkout(f)
3961 3966 normal(f)
3962 3967 else:
3963 3968 excluded_files.append(f)
3964 3969 else:
3965 3970 prntstatusmsg(b'undelete', f)
3966 3971 checkout(f)
3967 3972 normal(f)
3968 3973
3969 3974 copied = copies.pathcopies(repo[parent], ctx)
3970 3975
3971 3976 for f in (
3972 3977 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
3973 3978 ):
3974 3979 if f in copied:
3975 3980 repo.dirstate.copy(copied[f], f)
3976 3981
3977 3982
3978 3983 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3979 3984 # commands.outgoing. "missing" is "missing" of the result of
3980 3985 # "findcommonoutgoing()"
3981 3986 outgoinghooks = util.hooks()
3982 3987
3983 3988 # a list of (ui, repo) functions called by commands.summary
3984 3989 summaryhooks = util.hooks()
3985 3990
3986 3991 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3987 3992 #
3988 3993 # functions should return tuple of booleans below, if 'changes' is None:
3989 3994 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3990 3995 #
3991 3996 # otherwise, 'changes' is a tuple of tuples below:
3992 3997 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3993 3998 # - (desturl, destbranch, destpeer, outgoing)
3994 3999 summaryremotehooks = util.hooks()
3995 4000
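A minimal sketch of registering one of these summary hooks from an extension (the b'myext' name and the hook body are hypothetical; the return convention for the changes-is-None case follows the comment above):

    def mysummaryhook(ui, repo, opts, changes):
        if changes is None:
            # first pass: incoming information is not needed, outgoing is
            return (False, True)
        # second pass: 'changes' carries the source/destination tuples
        # described in the comment above
        ui.status(b'myext: checked remote summary\n')

    summaryremotehooks.add(b'myext', mysummaryhook)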
3996 4001
3997 4002 def checkunfinished(repo, commit=False, skipmerge=False):
3998 4003 '''Look for an unfinished multistep operation, like graft, and abort
3999 4004 if found. It's probably good to check this right before
4000 4005 bailifchanged().
4001 4006 '''
4002 4007 # Check for non-clearable states first, so things like rebase will take
4003 4008 # precedence over update.
4004 4009 for state in statemod._unfinishedstates:
4005 4010 if (
4006 4011 state._clearable
4007 4012 or (commit and state._allowcommit)
4008 4013 or state._reportonly
4009 4014 ):
4010 4015 continue
4011 4016 if state.isunfinished(repo):
4012 4017 raise error.Abort(state.msg(), hint=state.hint())
4013 4018
4014 4019 for s in statemod._unfinishedstates:
4015 4020 if (
4016 4021 not s._clearable
4017 4022 or (commit and s._allowcommit)
4018 4023 or (s._opname == b'merge' and skipmerge)
4019 4024 or s._reportonly
4020 4025 ):
4021 4026 continue
4022 4027 if s.isunfinished(repo):
4023 4028 raise error.Abort(s.msg(), hint=s.hint())
4024 4029
4025 4030
4026 4031 def clearunfinished(repo):
4027 4032 '''Check for unfinished operations (as above), and clear the ones
4028 4033 that are clearable.
4029 4034 '''
4030 4035 for state in statemod._unfinishedstates:
4031 4036 if state._reportonly:
4032 4037 continue
4033 4038 if not state._clearable and state.isunfinished(repo):
4034 4039 raise error.Abort(state.msg(), hint=state.hint())
4035 4040
4036 4041 for s in statemod._unfinishedstates:
4037 4042 if s._opname == b'merge' or s._reportonly:
4038 4043 continue
4039 4044 if s._clearable and s.isunfinished(repo):
4040 4045 util.unlink(repo.vfs.join(s._fname))
4041 4046
4042 4047
4043 4048 def getunfinishedstate(repo):
4044 4049 ''' Checks for unfinished operations and returns statecheck object
4045 4050 for it'''
4046 4051 for state in statemod._unfinishedstates:
4047 4052 if state.isunfinished(repo):
4048 4053 return state
4049 4054 return None
4050 4055
4051 4056
4052 4057 def howtocontinue(repo):
4053 4058 '''Check for an unfinished operation and return the command to finish
4054 4059 it.
4055 4060
4056 4061 statemod._unfinishedstates list is checked for an unfinished operation
4057 4062 and the corresponding message to finish it is generated if a method to
4058 4063 continue is supported by the operation.
4059 4064
4060 4065 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4061 4066 a boolean.
4062 4067 '''
4063 4068 contmsg = _(b"continue: %s")
4064 4069 for state in statemod._unfinishedstates:
4065 4070 if not state._continueflag:
4066 4071 continue
4067 4072 if state.isunfinished(repo):
4068 4073 return contmsg % state.continuemsg(), True
4069 4074 if repo[None].dirty(missing=True, merge=False, branch=False):
4070 4075 return contmsg % _(b"hg commit"), False
4071 4076 return None, None
4072 4077
4073 4078
4074 4079 def checkafterresolved(repo):
4075 4080 '''Inform the user about the next action after completing hg resolve
4076 4081
4077 4082 If there's an unfinished operation that supports the continue flag,
4078 4083 howtocontinue will yield repo.ui.warn as the reporter.
4079 4084
4080 4085 Otherwise, it will yield repo.ui.note.
4081 4086 '''
4082 4087 msg, warning = howtocontinue(repo)
4083 4088 if msg is not None:
4084 4089 if warning:
4085 4090 repo.ui.warn(b"%s\n" % msg)
4086 4091 else:
4087 4092 repo.ui.note(b"%s\n" % msg)
4088 4093
4089 4094
4090 4095 def wrongtooltocontinue(repo, task):
4091 4096 '''Raise an abort suggesting how to properly continue if there is an
4092 4097 active task.
4093 4098
4094 4099 Uses howtocontinue() to find the active task.
4095 4100
4096 4101 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4097 4102 a hint.
4098 4103 '''
4099 4104 after = howtocontinue(repo)
4100 4105 hint = None
4101 4106 if after[1]:
4102 4107 hint = after[0]
4103 4108 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4104 4109
4105 4110
4106 4111 def abortgraft(ui, repo, graftstate):
4107 4112 """abort the interrupted graft and roll back to the state before the
4108 4113 graft was interrupted"""
4109 4114 if not graftstate.exists():
4110 4115 raise error.Abort(_(b"no interrupted graft to abort"))
4111 4116 statedata = readgraftstate(repo, graftstate)
4112 4117 newnodes = statedata.get(b'newnodes')
4113 4118 if newnodes is None:
4114 4119 # an old graft state which does not have all the data required to abort
4115 4120 # the graft
4116 4121 raise error.Abort(_(b"cannot abort using an old graftstate"))
4117 4122
4118 4123 # changeset from which graft operation was started
4119 4124 if len(newnodes) > 0:
4120 4125 startctx = repo[newnodes[0]].p1()
4121 4126 else:
4122 4127 startctx = repo[b'.']
4123 4128 # whether to strip or not
4124 4129 cleanup = False
4125 4130 from . import hg
4126 4131
4127 4132 if newnodes:
4128 4133 newnodes = [repo[r].rev() for r in newnodes]
4129 4134 cleanup = True
4130 4135 # checking that none of the newnodes turned public or is public
4131 4136 immutable = [c for c in newnodes if not repo[c].mutable()]
4132 4137 if immutable:
4133 4138 repo.ui.warn(
4134 4139 _(b"cannot clean up public changesets %s\n")
4135 4140 % b', '.join(bytes(repo[r]) for r in immutable),
4136 4141 hint=_(b"see 'hg help phases' for details"),
4137 4142 )
4138 4143 cleanup = False
4139 4144
4140 4145 # checking that no new nodes are created on top of grafted revs
4141 4146 desc = set(repo.changelog.descendants(newnodes))
4142 4147 if desc - set(newnodes):
4143 4148 repo.ui.warn(
4144 4149 _(
4145 4150 b"new changesets detected on destination "
4146 4151 b"branch, can't strip\n"
4147 4152 )
4148 4153 )
4149 4154 cleanup = False
4150 4155
4151 4156 if cleanup:
4152 4157 with repo.wlock(), repo.lock():
4153 4158 hg.updaterepo(repo, startctx.node(), overwrite=True)
4154 4159 # stripping the new nodes created
4155 4160 strippoints = [
4156 4161 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4157 4162 ]
4158 4163 repair.strip(repo.ui, repo, strippoints, backup=False)
4159 4164
4160 4165 if not cleanup:
4161 4166 # we don't update to the startnode if we can't strip
4162 4167 startctx = repo[b'.']
4163 4168 hg.updaterepo(repo, startctx.node(), overwrite=True)
4164 4169
4165 4170 ui.status(_(b"graft aborted\n"))
4166 4171 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4167 4172 graftstate.delete()
4168 4173 return 0
4169 4174
4170 4175
4171 4176 def readgraftstate(repo, graftstate):
4172 4177 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4173 4178 """read the graft state file and return a dict of the data stored in it"""
4174 4179 try:
4175 4180 return graftstate.read()
4176 4181 except error.CorruptedState:
4177 4182 nodes = repo.vfs.read(b'graftstate').splitlines()
4178 4183 return {b'nodes': nodes}
4179 4184
4180 4185
4181 4186 def hgabortgraft(ui, repo):
4182 4187 """ abort logic for aborting graft using 'hg abort'"""
4183 4188 with repo.wlock():
4184 4189 graftstate = statemod.cmdstate(repo, b'graftstate')
4185 4190 return abortgraft(ui, repo, graftstate)
@@ -1,731 +1,731 b''
1 1 # commandserver.py - communicate with Mercurial's API over a pipe
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import gc
12 12 import os
13 13 import random
14 14 import signal
15 15 import socket
16 16 import struct
17 17 import traceback
18 18
19 19 try:
20 20 import selectors
21 21
22 22 selectors.BaseSelector
23 23 except ImportError:
24 24 from .thirdparty import selectors2 as selectors
25 25
26 26 from .i18n import _
27 27 from .pycompat import getattr
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 loggingutil,
32 32 pycompat,
33 33 repocache,
34 34 util,
35 35 vfs as vfsmod,
36 36 )
37 37 from .utils import (
38 38 cborutil,
39 39 procutil,
40 40 )
41 41
42 42
43 43 class channeledoutput(object):
44 44 """
45 45 Write data to out in the following format:
46 46
47 47 data length (unsigned int),
48 48 data
49 49 """
50 50
51 51 def __init__(self, out, channel):
52 52 self.out = out
53 53 self.channel = channel
54 54
55 55 @property
56 56 def name(self):
57 57 return b'<%c-channel>' % self.channel
58 58
59 59 def write(self, data):
60 60 if not data:
61 61 return
62 62 # single write() to guarantee the same atomicity as the underlying file
63 63 self.out.write(struct.pack(b'>cI', self.channel, len(data)) + data)
64 64 self.out.flush()
65 65
66 66 def __getattr__(self, attr):
67 67 if attr in ('isatty', 'fileno', 'tell', 'seek'):
68 68 raise AttributeError(attr)
69 69 return getattr(self.out, attr)
70 70
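A minimal client-side sketch of decoding these frames (readframe and pipe are hypothetical names; the 1-byte channel id plus big-endian uint32 length header is the '>cI' layout used by write() above):

    import struct

    def readframe(pipe):
        # read the 5-byte header: channel id (1 byte) + payload length (uint32)
        header = pipe.read(5)
        if len(header) < 5:
            return None, b''  # the server closed the stream
        channel, length = struct.unpack('>cI', header)
        return channel, pipe.read(length)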
71 71
72 72 class channeledmessage(object):
73 73 """
74 74 Write encoded message and metadata to out in the following format:
75 75
76 76 data length (unsigned int),
77 77 encoded message and metadata, as a flat key-value dict.
78 78
79 79 Each message should have 'type' attribute. Messages of unknown type
80 80 should be ignored.
81 81 """
82 82
83 83 # teach ui that write() can take **opts
84 84 structured = True
85 85
86 86 def __init__(self, out, channel, encodename, encodefn):
87 87 self._cout = channeledoutput(out, channel)
88 88 self.encoding = encodename
89 89 self._encodefn = encodefn
90 90
91 91 def write(self, data, **opts):
92 92 opts = pycompat.byteskwargs(opts)
93 93 if data is not None:
94 94 opts[b'data'] = data
95 95 self._cout.write(self._encodefn(opts))
96 96
97 97 def __getattr__(self, attr):
98 98 return getattr(self._cout, attr)
99 99
100 100
101 101 class channeledinput(object):
102 102 """
103 103 Read data from in_.
104 104
105 105 Requests for input are written to out in the following format:
106 106 channel identifier - 'I' for plain input, 'L' line based (1 byte)
107 107 how many bytes to send at most (unsigned int),
108 108
109 109 The client replies with:
110 110 data length (unsigned int), 0 meaning EOF
111 111 data
112 112 """
113 113
114 114 maxchunksize = 4 * 1024
115 115
116 116 def __init__(self, in_, out, channel):
117 117 self.in_ = in_
118 118 self.out = out
119 119 self.channel = channel
120 120
121 121 @property
122 122 def name(self):
123 123 return b'<%c-channel>' % self.channel
124 124
125 125 def read(self, size=-1):
126 126 if size < 0:
127 127 # if we need to consume all the client's input, ask for 4k chunks
128 128 # so the pipe doesn't fill up risking a deadlock
129 129 size = self.maxchunksize
130 130 s = self._read(size, self.channel)
131 131 buf = s
132 132 while s:
133 133 s = self._read(size, self.channel)
134 134 buf += s
135 135
136 136 return buf
137 137 else:
138 138 return self._read(size, self.channel)
139 139
140 140 def _read(self, size, channel):
141 141 if not size:
142 142 return b''
143 143 assert size > 0
144 144
145 145 # tell the client we need at most size bytes
146 146 self.out.write(struct.pack(b'>cI', channel, size))
147 147 self.out.flush()
148 148
149 149 length = self.in_.read(4)
150 150 length = struct.unpack(b'>I', length)[0]
151 151 if not length:
152 152 return b''
153 153 else:
154 154 return self.in_.read(length)
155 155
156 156 def readline(self, size=-1):
157 157 if size < 0:
158 158 size = self.maxchunksize
159 159 s = self._read(size, b'L')
160 160 buf = s
161 161 # keep asking for more until there's either no more or
162 162 # we got a full line
163 while s and s[-1] != b'\n':
163 while s and not s.endswith(b'\n'):
164 164 s = self._read(size, b'L')
165 165 buf += s
166 166
167 167 return buf
168 168 else:
169 169 return self._read(size, b'L')
170 170
171 171 def __iter__(self):
172 172 return self
173 173
174 174 def next(self):
175 175 l = self.readline()
176 176 if not l:
177 177 raise StopIteration
178 178 return l
179 179
180 180 __next__ = next
181 181
182 182 def __getattr__(self, attr):
183 183 if attr in ('isatty', 'fileno', 'tell', 'seek'):
184 184 raise AttributeError(attr)
185 185 return getattr(self.in_, attr)
186 186
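Conversely, a client answering an 'I' or 'L' request could reply as sketched below (the helper name is hypothetical; the uint32-length-prefixed reply, with zero meaning EOF, is what _read() above expects):

    import struct

    def respondinput(pipe, data):
        # reply with a big-endian uint32 length followed by the data;
        # a zero length tells the server the client hit EOF
        pipe.write(struct.pack('>I', len(data)) + data)
        pipe.flush()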
187 187
188 188 _messageencoders = {
189 189 b'cbor': lambda v: b''.join(cborutil.streamencode(v)),
190 190 }
191 191
192 192
193 193 def _selectmessageencoder(ui):
194 194 # experimental config: cmdserver.message-encodings
195 195 encnames = ui.configlist(b'cmdserver', b'message-encodings')
196 196 for n in encnames:
197 197 f = _messageencoders.get(n)
198 198 if f:
199 199 return n, f
200 200 raise error.Abort(
201 201 b'no supported message encodings: %s' % b' '.join(encnames)
202 202 )
203 203
204 204
205 205 class server(object):
206 206 """
207 207 Listens for commands on fin, runs them and writes the output on a channel
208 208 based stream to fout.
209 209 """
210 210
211 211 def __init__(self, ui, repo, fin, fout, prereposetups=None):
212 212 self.cwd = encoding.getcwd()
213 213
214 214 if repo:
215 215 # the ui here is really the repo ui so take its baseui so we don't
216 216 # end up with its local configuration
217 217 self.ui = repo.baseui
218 218 self.repo = repo
219 219 self.repoui = repo.ui
220 220 else:
221 221 self.ui = ui
222 222 self.repo = self.repoui = None
223 223 self._prereposetups = prereposetups
224 224
225 225 self.cdebug = channeledoutput(fout, b'd')
226 226 self.cerr = channeledoutput(fout, b'e')
227 227 self.cout = channeledoutput(fout, b'o')
228 228 self.cin = channeledinput(fin, fout, b'I')
229 229 self.cresult = channeledoutput(fout, b'r')
230 230
231 231 if self.ui.config(b'cmdserver', b'log') == b'-':
232 232 # switch log stream of server's ui to the 'd' (debug) channel
233 233 # (don't touch repo.ui as its lifetime is longer than the server)
234 234 self.ui = self.ui.copy()
235 235 setuplogging(self.ui, repo=None, fp=self.cdebug)
236 236
237 237 # TODO: add this to help/config.txt when stabilized
238 238 # ``channel``
239 239 # Use separate channel for structured output. (Command-server only)
240 240 self.cmsg = None
241 241 if ui.config(b'ui', b'message-output') == b'channel':
242 242 encname, encfn = _selectmessageencoder(ui)
243 243 self.cmsg = channeledmessage(fout, b'm', encname, encfn)
244 244
245 245 self.client = fin
246 246
247 247 def cleanup(self):
248 248 """release and restore resources taken during server session"""
249 249
250 250 def _read(self, size):
251 251 if not size:
252 252 return b''
253 253
254 254 data = self.client.read(size)
255 255
256 256 # is the other end closed?
257 257 if not data:
258 258 raise EOFError
259 259
260 260 return data
261 261
262 262 def _readstr(self):
263 263 """read a string from the channel
264 264
265 265 format:
266 266 data length (uint32), data
267 267 """
268 268 length = struct.unpack(b'>I', self._read(4))[0]
269 269 if not length:
270 270 return b''
271 271 return self._read(length)
272 272
273 273 def _readlist(self):
274 274 """read a list of NULL separated strings from the channel"""
275 275 s = self._readstr()
276 276 if s:
277 277 return s.split(b'\0')
278 278 else:
279 279 return []
280 280
281 281 def runcommand(self):
282 282 """ reads a list of \0 terminated arguments, executes
283 283 and writes the return code to the result channel """
284 284 from . import dispatch # avoid cycle
285 285
286 286 args = self._readlist()
287 287
288 288 # copy the uis so changes (e.g. --config or --verbose) don't
289 289 # persist between requests
290 290 copiedui = self.ui.copy()
291 291 uis = [copiedui]
292 292 if self.repo:
293 293 self.repo.baseui = copiedui
294 294 # clone ui without using ui.copy because this is protected
295 295 repoui = self.repoui.__class__(self.repoui)
296 296 repoui.copy = copiedui.copy # redo copy protection
297 297 uis.append(repoui)
298 298 self.repo.ui = self.repo.dirstate._ui = repoui
299 299 self.repo.invalidateall()
300 300
301 301 for ui in uis:
302 302 ui.resetstate()
303 303 # any kind of interaction must use server channels, but chg may
304 304 # replace channels by fully functional tty files. so nontty is
305 305 # enforced only if cin is a channel.
306 306 if not util.safehasattr(self.cin, b'fileno'):
307 307 ui.setconfig(b'ui', b'nontty', b'true', b'commandserver')
308 308
309 309 req = dispatch.request(
310 310 args[:],
311 311 copiedui,
312 312 self.repo,
313 313 self.cin,
314 314 self.cout,
315 315 self.cerr,
316 316 self.cmsg,
317 317 prereposetups=self._prereposetups,
318 318 )
319 319
320 320 try:
321 321 ret = dispatch.dispatch(req) & 255
322 322 self.cresult.write(struct.pack(b'>i', int(ret)))
323 323 finally:
324 324 # restore old cwd
325 325 if b'--cwd' in args:
326 326 os.chdir(self.cwd)
327 327
328 328 def getencoding(self):
329 329 """ writes the current encoding to the result channel """
330 330 self.cresult.write(encoding.encoding)
331 331
332 332 def serveone(self):
333 333 cmd = self.client.readline()[:-1]
334 334 if cmd:
335 335 handler = self.capabilities.get(cmd)
336 336 if handler:
337 337 handler(self)
338 338 else:
339 339 # clients are expected to check what commands are supported by
340 340 # looking at the server's capabilities
341 341 raise error.Abort(_(b'unknown command %s') % cmd)
342 342
343 343 return cmd != b''
344 344
345 345 capabilities = {b'runcommand': runcommand, b'getencoding': getencoding}
346 346
347 347 def serve(self):
348 348 hellomsg = b'capabilities: ' + b' '.join(sorted(self.capabilities))
349 349 hellomsg += b'\n'
350 350 hellomsg += b'encoding: ' + encoding.encoding
351 351 hellomsg += b'\n'
352 352 if self.cmsg:
353 353 hellomsg += b'message-encoding: %s\n' % self.cmsg.encoding
354 354 hellomsg += b'pid: %d' % procutil.getpid()
355 355 if util.safehasattr(os, b'getpgid'):
356 356 hellomsg += b'\n'
357 357 hellomsg += b'pgid: %d' % os.getpgid(0)
358 358
359 359 # write the hello msg in -one- chunk
360 360 self.cout.write(hellomsg)
361 361
362 362 try:
363 363 while self.serveone():
364 364 pass
365 365 except EOFError:
366 366 # we'll get here if the client disconnected while we were reading
367 367 # its request
368 368 return 1
369 369
370 370 return 0
371 371
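Putting the pieces together, a client session could look like the sketch below (helper names are hypothetical; the key-value hello message, the NUL-separated argument list and the '>i' result code come from serve(), _readlist() and runcommand() above):

    import struct

    def parsehello(payload):
        # the hello message is 'key: value' lines sent as one frame on the 'o' channel
        return dict(line.split(b': ', 1) for line in payload.splitlines())

    def sendruncommand(pipe, args):
        # command name on its own line, then a uint32-prefixed, NUL-separated arg list
        data = b'\0'.join(args)
        pipe.write(b'runcommand\n')
        pipe.write(struct.pack('>I', len(data)) + data)
        pipe.flush()

    # the server answers on the 'r' channel with a big-endian int32 return code,
    # e.g. rc = struct.unpack('>i', payload)[0]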
372 372
373 373 def setuplogging(ui, repo=None, fp=None):
374 374 """Set up server logging facility
375 375
376 376 If cmdserver.log is '-', log messages will be sent to the given fp.
377 377 It should be the 'd' channel while a client is connected, and otherwise
378 378 is the stderr of the server process.
379 379 """
380 380 # developer config: cmdserver.log
381 381 logpath = ui.config(b'cmdserver', b'log')
382 382 if not logpath:
383 383 return
384 384 # developer config: cmdserver.track-log
385 385 tracked = set(ui.configlist(b'cmdserver', b'track-log'))
386 386
387 387 if logpath == b'-' and fp:
388 388 logger = loggingutil.fileobjectlogger(fp, tracked)
389 389 elif logpath == b'-':
390 390 logger = loggingutil.fileobjectlogger(ui.ferr, tracked)
391 391 else:
392 392 logpath = os.path.abspath(util.expandpath(logpath))
393 393 # developer config: cmdserver.max-log-files
394 394 maxfiles = ui.configint(b'cmdserver', b'max-log-files')
395 395 # developer config: cmdserver.max-log-size
396 396 maxsize = ui.configbytes(b'cmdserver', b'max-log-size')
397 397 vfs = vfsmod.vfs(os.path.dirname(logpath))
398 398 logger = loggingutil.filelogger(
399 399 vfs,
400 400 os.path.basename(logpath),
401 401 tracked,
402 402 maxfiles=maxfiles,
403 403 maxsize=maxsize,
404 404 )
405 405
406 406 targetuis = {ui}
407 407 if repo:
408 408 targetuis.add(repo.baseui)
409 409 targetuis.add(repo.ui)
410 410 for u in targetuis:
411 411 u.setlogger(b'cmdserver', logger)
412 412
413 413
414 414 class pipeservice(object):
415 415 def __init__(self, ui, repo, opts):
416 416 self.ui = ui
417 417 self.repo = repo
418 418
419 419 def init(self):
420 420 pass
421 421
422 422 def run(self):
423 423 ui = self.ui
424 424 # redirect stdio to null device so that broken extensions or in-process
425 425 # hooks will never cause corruption of channel protocol.
426 426 with ui.protectedfinout() as (fin, fout):
427 427 sv = server(ui, self.repo, fin, fout)
428 428 try:
429 429 return sv.serve()
430 430 finally:
431 431 sv.cleanup()
432 432
433 433
434 434 def _initworkerprocess():
435 435 # use a different process group from the master process, in order to:
436 436 # 1. make the current process group no longer "orphaned" (because the
437 437 # parent of this process is in a different process group while
438 438 # remaining in the same session)
439 439 # according to POSIX 2.2.2.52, an orphaned process group will ignore
440 440 # terminal-generated stop signals like SIGTSTP (Ctrl+Z), which will
441 441 # cause trouble for things like ncurses.
442 442 # 2. the client can use kill(-pgid, sig) to simulate terminal-generated
443 443 # SIGINT (Ctrl+C) and process-exit-generated SIGHUP. our child
444 444 # processes like ssh will be killed properly, without affecting
445 445 # unrelated processes.
446 446 os.setpgid(0, 0)
447 447 # change random state otherwise forked request handlers would have the
448 448 # same state inherited from the parent.
449 449 random.seed()
450 450
451 451
452 452 def _serverequest(ui, repo, conn, createcmdserver, prereposetups):
453 453 fin = conn.makefile('rb')
454 454 fout = conn.makefile('wb')
455 455 sv = None
456 456 try:
457 457 sv = createcmdserver(repo, conn, fin, fout, prereposetups)
458 458 try:
459 459 sv.serve()
460 460 # handle exceptions that may be raised by the command server. most
461 461 # known exceptions are caught by dispatch.
462 462 except error.Abort as inst:
463 463 ui.error(_(b'abort: %s\n') % inst)
464 464 except IOError as inst:
465 465 if inst.errno != errno.EPIPE:
466 466 raise
467 467 except KeyboardInterrupt:
468 468 pass
469 469 finally:
470 470 sv.cleanup()
471 471 except: # re-raises
472 472 # also write traceback to error channel. otherwise client cannot
473 473 # see it because it is written to server's stderr by default.
474 474 if sv:
475 475 cerr = sv.cerr
476 476 else:
477 477 cerr = channeledoutput(fout, b'e')
478 478 cerr.write(encoding.strtolocal(traceback.format_exc()))
479 479 raise
480 480 finally:
481 481 fin.close()
482 482 try:
483 483 fout.close() # implicit flush() may cause another EPIPE
484 484 except IOError as inst:
485 485 if inst.errno != errno.EPIPE:
486 486 raise
487 487
488 488
489 489 class unixservicehandler(object):
490 490 """Set of pluggable operations for unix-mode services
491 491
492 492 Almost all methods except for createcmdserver() are called in the main
493 493 process. You can't pass mutable resource back from createcmdserver().
494 494 """
495 495
496 496 pollinterval = None
497 497
498 498 def __init__(self, ui):
499 499 self.ui = ui
500 500
501 501 def bindsocket(self, sock, address):
502 502 util.bindunixsocket(sock, address)
503 503 sock.listen(socket.SOMAXCONN)
504 504 self.ui.status(_(b'listening at %s\n') % address)
505 505 self.ui.flush() # avoid buffering of status message
506 506
507 507 def unlinksocket(self, address):
508 508 os.unlink(address)
509 509
510 510 def shouldexit(self):
511 511 """True if server should shut down; checked per pollinterval"""
512 512 return False
513 513
514 514 def newconnection(self):
515 515 """Called when main process notices new connection"""
516 516
517 517 def createcmdserver(self, repo, conn, fin, fout, prereposetups):
518 518 """Create new command server instance; called in the process that
519 519 serves for the current connection"""
520 520 return server(self.ui, repo, fin, fout, prereposetups)
521 521
522 522
523 523 class unixforkingservice(object):
524 524 """
525 525 Listens on unix domain socket and forks server per connection
526 526 """
527 527
528 528 def __init__(self, ui, repo, opts, handler=None):
529 529 self.ui = ui
530 530 self.repo = repo
531 531 self.address = opts[b'address']
532 532 if not util.safehasattr(socket, b'AF_UNIX'):
533 533 raise error.Abort(_(b'unsupported platform'))
534 534 if not self.address:
535 535 raise error.Abort(_(b'no socket path specified with --address'))
536 536 self._servicehandler = handler or unixservicehandler(ui)
537 537 self._sock = None
538 538 self._mainipc = None
539 539 self._workeripc = None
540 540 self._oldsigchldhandler = None
541 541 self._workerpids = set() # updated by signal handler; do not iterate
542 542 self._socketunlinked = None
543 543 # experimental config: cmdserver.max-repo-cache
544 544 maxlen = ui.configint(b'cmdserver', b'max-repo-cache')
545 545 if maxlen < 0:
546 546 raise error.Abort(_(b'negative max-repo-cache size not allowed'))
547 547 self._repoloader = repocache.repoloader(ui, maxlen)
548 548 # attempt to avoid crash in CoreFoundation when using chg after fix in
549 549 # a89381e04c58
550 550 if pycompat.isdarwin:
551 551 procutil.gui()
552 552
553 553 def init(self):
554 554 self._sock = socket.socket(socket.AF_UNIX)
555 555 # IPC channel from many workers to one main process; this is actually
556 556 # a uni-directional pipe, but is backed by a DGRAM socket so each
557 557 # message can be easily separated.
558 558 o = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM)
559 559 self._mainipc, self._workeripc = o
560 560 self._servicehandler.bindsocket(self._sock, self.address)
561 561 if util.safehasattr(procutil, b'unblocksignal'):
562 562 procutil.unblocksignal(signal.SIGCHLD)
563 563 o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
564 564 self._oldsigchldhandler = o
565 565 self._socketunlinked = False
566 566 self._repoloader.start()
567 567
568 568 def _unlinksocket(self):
569 569 if not self._socketunlinked:
570 570 self._servicehandler.unlinksocket(self.address)
571 571 self._socketunlinked = True
572 572
573 573 def _cleanup(self):
574 574 signal.signal(signal.SIGCHLD, self._oldsigchldhandler)
575 575 self._sock.close()
576 576 self._mainipc.close()
577 577 self._workeripc.close()
578 578 self._unlinksocket()
579 579 self._repoloader.stop()
580 580 # don't kill child processes as they have active clients, just wait
581 581 self._reapworkers(0)
582 582
583 583 def run(self):
584 584 try:
585 585 self._mainloop()
586 586 finally:
587 587 self._cleanup()
588 588
589 589 def _mainloop(self):
590 590 exiting = False
591 591 h = self._servicehandler
592 592 selector = selectors.DefaultSelector()
593 593 selector.register(
594 594 self._sock, selectors.EVENT_READ, self._acceptnewconnection
595 595 )
596 596 selector.register(
597 597 self._mainipc, selectors.EVENT_READ, self._handlemainipc
598 598 )
599 599 while True:
600 600 if not exiting and h.shouldexit():
601 601 # clients can no longer connect() to the domain socket, so
602 602 # we stop queuing new requests.
603 603 # for requests that are queued (connect()-ed, but haven't been
604 604 # accept()-ed), handle them before exit. otherwise, clients
605 605 # waiting for recv() will receive ECONNRESET.
606 606 self._unlinksocket()
607 607 exiting = True
608 608 try:
609 609 events = selector.select(timeout=h.pollinterval)
610 610 except OSError as inst:
611 611 # selectors2 raises ETIMEDOUT if timeout exceeded while
612 612 # handling signal interrupt. That's probably wrong, but
613 613 # we can easily get around it.
614 614 if inst.errno != errno.ETIMEDOUT:
615 615 raise
616 616 events = []
617 617 if not events:
618 618 # only exit if we completed all queued requests
619 619 if exiting:
620 620 break
621 621 continue
622 622 for key, _mask in events:
623 623 key.data(key.fileobj, selector)
624 624 selector.close()
625 625
626 626 def _acceptnewconnection(self, sock, selector):
627 627 h = self._servicehandler
628 628 try:
629 629 conn, _addr = sock.accept()
630 630 except socket.error as inst:
631 631 if inst.args[0] == errno.EINTR:
632 632 return
633 633 raise
634 634
635 635 # Future improvement: On Python 3.7, maybe gc.freeze() can be used
636 636 # to prevent COW memory from being touched by GC.
637 637 # https://instagram-engineering.com/
638 638 # copy-on-write-friendly-python-garbage-collection-ad6ed5233ddf
639 639 pid = os.fork()
640 640 if pid:
641 641 try:
642 642 self.ui.log(
643 643 b'cmdserver', b'forked worker process (pid=%d)\n', pid
644 644 )
645 645 self._workerpids.add(pid)
646 646 h.newconnection()
647 647 finally:
648 648 conn.close() # release handle in parent process
649 649 else:
650 650 try:
651 651 selector.close()
652 652 sock.close()
653 653 self._mainipc.close()
654 654 self._runworker(conn)
655 655 conn.close()
656 656 self._workeripc.close()
657 657 os._exit(0)
658 658 except: # never return, hence no re-raises
659 659 try:
660 660 self.ui.traceback(force=True)
661 661 finally:
662 662 os._exit(255)
663 663
664 664 def _handlemainipc(self, sock, selector):
665 665 """Process messages sent from a worker"""
666 666 try:
667 667 path = sock.recv(32768) # large enough to receive path
668 668 except socket.error as inst:
669 669 if inst.args[0] == errno.EINTR:
670 670 return
671 671 raise
672 672 self._repoloader.load(path)
673 673
674 674 def _sigchldhandler(self, signal, frame):
675 675 self._reapworkers(os.WNOHANG)
676 676
677 677 def _reapworkers(self, options):
678 678 while self._workerpids:
679 679 try:
680 680 pid, _status = os.waitpid(-1, options)
681 681 except OSError as inst:
682 682 if inst.errno == errno.EINTR:
683 683 continue
684 684 if inst.errno != errno.ECHILD:
685 685 raise
686 686 # no child processes at all (reaped by other waitpid()?)
687 687 self._workerpids.clear()
688 688 return
689 689 if pid == 0:
690 690 # no waitable child processes
691 691 return
692 692 self.ui.log(b'cmdserver', b'worker process exited (pid=%d)\n', pid)
693 693 self._workerpids.discard(pid)
694 694
695 695 def _runworker(self, conn):
696 696 signal.signal(signal.SIGCHLD, self._oldsigchldhandler)
697 697 _initworkerprocess()
698 698 h = self._servicehandler
699 699 try:
700 700 _serverequest(
701 701 self.ui,
702 702 self.repo,
703 703 conn,
704 704 h.createcmdserver,
705 705 prereposetups=[self._reposetup],
706 706 )
707 707 finally:
708 708 gc.collect() # trigger __del__ since worker process uses os._exit
709 709
710 710 def _reposetup(self, ui, repo):
711 711 if not repo.local():
712 712 return
713 713
714 714 class unixcmdserverrepo(repo.__class__):
715 715 def close(self):
716 716 super(unixcmdserverrepo, self).close()
717 717 try:
718 718 self._cmdserveripc.send(self.root)
719 719 except socket.error:
720 720 self.ui.log(
721 721 b'cmdserver', b'failed to send repo root to master\n'
722 722 )
723 723
724 724 repo.__class__ = unixcmdserverrepo
725 725 repo._cmdserveripc = self._workeripc
726 726
727 727 cachedrepo = self._repoloader.get(repo.root)
728 728 if cachedrepo is None:
729 729 return
730 730 repo.ui.log(b'repocache', b'repo from cache: %s\n', repo.root)
731 731 repocache.copycache(cachedrepo, repo)
@@ -1,1182 +1,1182 b''
1 1 # shelve.py - save/restore working directory state
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """save and restore changes to the working directory
9 9
10 10 The "hg shelve" command saves changes made to the working directory
11 11 and reverts those changes, resetting the working directory to a clean
12 12 state.
13 13
14 14 Later on, the "hg unshelve" command restores the changes saved by "hg
15 15 shelve". Changes can be restored even after updating to a different
16 16 parent, in which case Mercurial's merge machinery will resolve any
17 17 conflicts if necessary.
18 18
19 19 You can have more than one shelved change outstanding at a time; each
20 20 shelved change has a distinct name. For details, see the help for "hg
21 21 shelve".
22 22 """
23 23 from __future__ import absolute_import
24 24
25 25 import collections
26 26 import errno
27 27 import itertools
28 28 import stat
29 29
30 30 from .i18n import _
31 31 from .pycompat import open
32 32 from . import (
33 33 bookmarks,
34 34 bundle2,
35 35 bundlerepo,
36 36 changegroup,
37 37 cmdutil,
38 38 discovery,
39 39 error,
40 40 exchange,
41 41 hg,
42 42 lock as lockmod,
43 43 mdiff,
44 44 merge,
45 45 node as nodemod,
46 46 patch,
47 47 phases,
48 48 pycompat,
49 49 repair,
50 50 scmutil,
51 51 templatefilters,
52 52 util,
53 53 vfs as vfsmod,
54 54 )
55 55 from .utils import (
56 56 dateutil,
57 57 stringutil,
58 58 )
59 59
60 60 backupdir = b'shelve-backup'
61 61 shelvedir = b'shelved'
62 62 shelvefileextensions = [b'hg', b'patch', b'shelve']
63 63 # universal extension is present in all types of shelves
64 64 patchextension = b'patch'
65 65
66 66 # we never need the user, so we use a
67 67 # generic user for all shelve operations
68 68 shelveuser = b'shelve@localhost'
69 69
70 70
71 71 class shelvedfile(object):
72 72 """Helper for the file storing a single shelve
73 73
74 74 Handles common functions on shelve files (.hg/.patch) using
75 75 the vfs layer"""
76 76
77 77 def __init__(self, repo, name, filetype=None):
78 78 self.repo = repo
79 79 self.name = name
80 80 self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
81 81 self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
82 82 self.ui = self.repo.ui
83 83 if filetype:
84 84 self.fname = name + b'.' + filetype
85 85 else:
86 86 self.fname = name
87 87
88 88 def exists(self):
89 89 return self.vfs.exists(self.fname)
90 90
91 91 def filename(self):
92 92 return self.vfs.join(self.fname)
93 93
94 94 def backupfilename(self):
95 95 def gennames(base):
96 96 yield base
97 97 base, ext = base.rsplit(b'.', 1)
98 98 for i in itertools.count(1):
99 99 yield b'%s-%d.%s' % (base, i, ext)
100 100
101 101 name = self.backupvfs.join(self.fname)
102 102 for n in gennames(name):
103 103 if not self.backupvfs.exists(n):
104 104 return n
105 105
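    # Illustrative example (hypothetical filenames): for a shelve file
    # 'default-01.patch' that already has backups, gennames() yields
    # 'default-01.patch', then 'default-01-1.patch', 'default-01-2.patch',
    # and so on, until a name that does not yet exist under the
    # shelve-backup directory is found.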
106 106 def movetobackup(self):
107 107 if not self.backupvfs.isdir():
108 108 self.backupvfs.makedir()
109 109 util.rename(self.filename(), self.backupfilename())
110 110
111 111 def stat(self):
112 112 return self.vfs.stat(self.fname)
113 113
114 114 def opener(self, mode=b'rb'):
115 115 try:
116 116 return self.vfs(self.fname, mode)
117 117 except IOError as err:
118 118 if err.errno != errno.ENOENT:
119 119 raise
120 120 raise error.Abort(_(b"shelved change '%s' not found") % self.name)
121 121
122 122 def applybundle(self, tr):
123 123 fp = self.opener()
124 124 try:
125 125 targetphase = phases.internal
126 126 if not phases.supportinternal(self.repo):
127 127 targetphase = phases.secret
128 128 gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
129 129 pretip = self.repo[b'tip']
130 130 bundle2.applybundle(
131 131 self.repo,
132 132 gen,
133 133 tr,
134 134 source=b'unshelve',
135 135 url=b'bundle:' + self.vfs.join(self.fname),
136 136 targetphase=targetphase,
137 137 )
138 138 shelvectx = self.repo[b'tip']
139 139 if pretip == shelvectx:
140 140 shelverev = tr.changes[b'revduplicates'][-1]
141 141 shelvectx = self.repo[shelverev]
142 142 return shelvectx
143 143 finally:
144 144 fp.close()
145 145
146 146 def bundlerepo(self):
147 147 path = self.vfs.join(self.fname)
148 148 return bundlerepo.instance(
149 149 self.repo.baseui, b'bundle://%s+%s' % (self.repo.root, path), False
150 150 )
151 151
152 152 def writebundle(self, bases, node):
153 153 cgversion = changegroup.safeversion(self.repo)
154 154 if cgversion == b'01':
155 155 btype = b'HG10BZ'
156 156 compression = None
157 157 else:
158 158 btype = b'HG20'
159 159 compression = b'BZ'
160 160
161 161 repo = self.repo.unfiltered()
162 162
163 163 outgoing = discovery.outgoing(
164 164 repo, missingroots=bases, missingheads=[node]
165 165 )
166 166 cg = changegroup.makechangegroup(repo, outgoing, cgversion, b'shelve')
167 167
168 168 bundle2.writebundle(
169 169 self.ui, cg, self.fname, btype, self.vfs, compression=compression
170 170 )
171 171
172 172 def writeinfo(self, info):
173 173 scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
174 174
175 175 def readinfo(self):
176 176 return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
177 177
178 178
179 179 class shelvedstate(object):
180 180 """Handle persistence during unshelving operations.
181 181
182 182 Handles saving and restoring a shelved state. Ensures that different
183 183 versions of a shelved state are possible and handles them appropriately.
184 184 """
185 185
186 186 _version = 2
187 187 _filename = b'shelvedstate'
188 188 _keep = b'keep'
189 189 _nokeep = b'nokeep'
190 190 # colon is essential to differentiate from a real bookmark name
191 191 _noactivebook = b':no-active-bookmark'
192 192 _interactive = b'interactive'
193 193
194 194 @classmethod
195 195 def _verifyandtransform(cls, d):
196 196 """Some basic shelvestate syntactic verification and transformation"""
197 197 try:
198 198 d[b'originalwctx'] = nodemod.bin(d[b'originalwctx'])
199 199 d[b'pendingctx'] = nodemod.bin(d[b'pendingctx'])
200 200 d[b'parents'] = [nodemod.bin(h) for h in d[b'parents'].split(b' ')]
201 201 d[b'nodestoremove'] = [
202 202 nodemod.bin(h) for h in d[b'nodestoremove'].split(b' ')
203 203 ]
204 204 except (ValueError, TypeError, KeyError) as err:
205 205 raise error.CorruptedState(pycompat.bytestr(err))
206 206
207 207 @classmethod
208 208 def _getversion(cls, repo):
209 209 """Read version information from shelvestate file"""
210 210 fp = repo.vfs(cls._filename)
211 211 try:
212 212 version = int(fp.readline().strip())
213 213 except ValueError as err:
214 214 raise error.CorruptedState(pycompat.bytestr(err))
215 215 finally:
216 216 fp.close()
217 217 return version
218 218
219 219 @classmethod
220 220 def _readold(cls, repo):
221 221 """Read the old position-based version of a shelvestate file"""
222 222 # Order is important, because old shelvestate file uses it
223 223 # to determine values of fields (e.g. name is on the second line,
224 224 # originalwctx is on the third and so forth). Please do not change.
225 225 keys = [
226 226 b'version',
227 227 b'name',
228 228 b'originalwctx',
229 229 b'pendingctx',
230 230 b'parents',
231 231 b'nodestoremove',
232 232 b'branchtorestore',
233 233 b'keep',
234 234 b'activebook',
235 235 ]
236 236 # this is executed only rarely, so it is not a big deal
237 237 # that we open this file twice
238 238 fp = repo.vfs(cls._filename)
239 239 d = {}
240 240 try:
241 241 for key in keys:
242 242 d[key] = fp.readline().strip()
243 243 finally:
244 244 fp.close()
245 245 return d
246 246
247 247 @classmethod
248 248 def load(cls, repo):
249 249 version = cls._getversion(repo)
250 250 if version < cls._version:
251 251 d = cls._readold(repo)
252 252 elif version == cls._version:
253 253 d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename).read(
254 254 firstlinenonkeyval=True
255 255 )
256 256 else:
257 257 raise error.Abort(
258 258 _(
259 259 b'this version of shelve is incompatible '
260 260 b'with the version used in this repo'
261 261 )
262 262 )
263 263
264 264 cls._verifyandtransform(d)
265 265 try:
266 266 obj = cls()
267 267 obj.name = d[b'name']
268 268 obj.wctx = repo[d[b'originalwctx']]
269 269 obj.pendingctx = repo[d[b'pendingctx']]
270 270 obj.parents = d[b'parents']
271 271 obj.nodestoremove = d[b'nodestoremove']
272 272 obj.branchtorestore = d.get(b'branchtorestore', b'')
273 273 obj.keep = d.get(b'keep') == cls._keep
274 274 obj.activebookmark = b''
275 275 if d.get(b'activebook', b'') != cls._noactivebook:
276 276 obj.activebookmark = d.get(b'activebook', b'')
277 277 obj.interactive = d.get(b'interactive') == cls._interactive
278 278 except (error.RepoLookupError, KeyError) as err:
279 279 raise error.CorruptedState(pycompat.bytestr(err))
280 280
281 281 return obj
282 282
283 283 @classmethod
284 284 def save(
285 285 cls,
286 286 repo,
287 287 name,
288 288 originalwctx,
289 289 pendingctx,
290 290 nodestoremove,
291 291 branchtorestore,
292 292 keep=False,
293 293 activebook=b'',
294 294 interactive=False,
295 295 ):
296 296 info = {
297 297 b"name": name,
298 298 b"originalwctx": nodemod.hex(originalwctx.node()),
299 299 b"pendingctx": nodemod.hex(pendingctx.node()),
300 300 b"parents": b' '.join(
301 301 [nodemod.hex(p) for p in repo.dirstate.parents()]
302 302 ),
303 303 b"nodestoremove": b' '.join(
304 304 [nodemod.hex(n) for n in nodestoremove]
305 305 ),
306 306 b"branchtorestore": branchtorestore,
307 307 b"keep": cls._keep if keep else cls._nokeep,
308 308 b"activebook": activebook or cls._noactivebook,
309 309 }
310 310 if interactive:
311 311 info[b'interactive'] = cls._interactive
312 312 scmutil.simplekeyvaluefile(repo.vfs, cls._filename).write(
313 313 info, firstline=(b"%d" % cls._version)
314 314 )
315 315
316 316 @classmethod
317 317 def clear(cls, repo):
318 318 repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
319 319
320 320
321 321 def cleanupoldbackups(repo):
322 322 vfs = vfsmod.vfs(repo.vfs.join(backupdir))
323 323 maxbackups = repo.ui.configint(b'shelve', b'maxbackups')
324 324 hgfiles = [f for f in vfs.listdir() if f.endswith(b'.' + patchextension)]
325 325 hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
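    # backups are sorted oldest-first by mtime; 'bordermtime' is the mtime of
    # the oldest backup that will be kept, so backups sharing that timestamp
    # are never deleted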
326 326 if maxbackups > 0 and maxbackups < len(hgfiles):
327 327 bordermtime = hgfiles[-maxbackups][0]
328 328 else:
329 329 bordermtime = None
330 330 for mtime, f in hgfiles[: len(hgfiles) - maxbackups]:
331 331 if mtime == bordermtime:
332 332             # keep it, because the timestamp can't decide the exact order of backups
333 333 continue
334 334 base = f[: -(1 + len(patchextension))]
335 335 for ext in shelvefileextensions:
336 336 vfs.tryunlink(base + b'.' + ext)
337 337
338 338
339 339 def _backupactivebookmark(repo):
340 340 activebookmark = repo._activebookmark
341 341 if activebookmark:
342 342 bookmarks.deactivate(repo)
343 343 return activebookmark
344 344
345 345
346 346 def _restoreactivebookmark(repo, mark):
347 347 if mark:
348 348 bookmarks.activate(repo, mark)
349 349
350 350
351 351 def _aborttransaction(repo, tr):
352 352 '''Abort current transaction for shelve/unshelve, but keep dirstate
353 353 '''
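    # back up the dirstate before aborting and restore it afterwards, so that
    # changes to the working copy state survive the transaction rollback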
354 354 dirstatebackupname = b'dirstate.shelve'
355 355 repo.dirstate.savebackup(tr, dirstatebackupname)
356 356 tr.abort()
357 357 repo.dirstate.restorebackup(None, dirstatebackupname)
358 358
359 359
360 360 def getshelvename(repo, parent, opts):
361 361 """Decide on the name this shelve is going to have"""
362 362
363 363 def gennames():
364 364 yield label
365 365 for i in itertools.count(1):
366 366 yield b'%s-%02d' % (label, i)
367 367
368 368 name = opts.get(b'name')
369 369 label = repo._activebookmark or parent.branch() or b'default'
370 370     # slashes aren't allowed in filenames, so replace them in the label
371 371 label = label.replace(b'/', b'_')
372 372 label = label.replace(b'\\', b'_')
373 373     # filenames must not start with '.', as the shelve file should not be hidden
374 374 if label.startswith(b'.'):
375 375 label = label.replace(b'.', b'_', 1)
376 376
377 377 if name:
378 378 if shelvedfile(repo, name, patchextension).exists():
379 379 e = _(b"a shelved change named '%s' already exists") % name
380 380 raise error.Abort(e)
381 381
382 382 # ensure we are not creating a subdirectory or a hidden file
383 383 if b'/' in name or b'\\' in name:
384 384 raise error.Abort(
385 385 _(b'shelved change names can not contain slashes')
386 386 )
387 387 if name.startswith(b'.'):
388 388 raise error.Abort(_(b"shelved change names can not start with '.'"))
389 389
390 390 else:
391 391 for n in gennames():
392 392 if not shelvedfile(repo, n, patchextension).exists():
393 393 name = n
394 394 break
395 395
396 396 return name
397 397
398 398
399 399 def mutableancestors(ctx):
400 400     """return all mutable ancestors for ctx (including ctx itself)
401 401
402 402 Much faster than the revset ancestors(ctx) & draft()"""
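    # breadth-first walk over parents; only mutable parents are queued, so the
    # walk stops at the first public (immutable) ancestors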
403 403 seen = {nodemod.nullrev}
404 404 visit = collections.deque()
405 405 visit.append(ctx)
406 406 while visit:
407 407 ctx = visit.popleft()
408 408 yield ctx.node()
409 409 for parent in ctx.parents():
410 410 rev = parent.rev()
411 411 if rev not in seen:
412 412 seen.add(rev)
413 413 if parent.mutable():
414 414 visit.append(parent)
415 415
416 416
417 417 def getcommitfunc(extra, interactive, editor=False):
418 418 def commitfunc(ui, repo, message, match, opts):
419 419 hasmq = util.safehasattr(repo, b'mq')
420 420 if hasmq:
421 421 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
422 422
423 423 targetphase = phases.internal
424 424 if not phases.supportinternal(repo):
425 425 targetphase = phases.secret
426 426 overrides = {(b'phases', b'new-commit'): targetphase}
427 427 try:
428 428 editor_ = False
429 429 if editor:
430 430 editor_ = cmdutil.getcommiteditor(
431 431 editform=b'shelve.shelve', **pycompat.strkwargs(opts)
432 432 )
433 433 with repo.ui.configoverride(overrides):
434 434 return repo.commit(
435 435 message,
436 436 shelveuser,
437 437 opts.get(b'date'),
438 438 match,
439 439 editor=editor_,
440 440 extra=extra,
441 441 )
442 442 finally:
443 443 if hasmq:
444 444 repo.mq.checkapplied = saved
445 445
446 446 def interactivecommitfunc(ui, repo, *pats, **opts):
447 447 opts = pycompat.byteskwargs(opts)
448 448 match = scmutil.match(repo[b'.'], pats, {})
449 449 message = opts[b'message']
450 450 return commitfunc(ui, repo, message, match, opts)
451 451
452 452 return interactivecommitfunc if interactive else commitfunc
453 453
454 454
455 455 def _nothingtoshelvemessaging(ui, repo, pats, opts):
456 456 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
457 457 if stat.deleted:
458 458 ui.status(
459 459 _(b"nothing changed (%d missing files, see 'hg status')\n")
460 460 % len(stat.deleted)
461 461 )
462 462 else:
463 463 ui.status(_(b"nothing changed\n"))
464 464
465 465
466 466 def _shelvecreatedcommit(repo, node, name, match):
467 467 info = {b'node': nodemod.hex(node)}
468 468 shelvedfile(repo, name, b'shelve').writeinfo(info)
469 469 bases = list(mutableancestors(repo[node]))
470 470 shelvedfile(repo, name, b'hg').writebundle(bases, node)
471 471 with shelvedfile(repo, name, patchextension).opener(b'wb') as fp:
472 472 cmdutil.exportfile(
473 473 repo, [node], fp, opts=mdiff.diffopts(git=True), match=match
474 474 )
475 475
476 476
477 477 def _includeunknownfiles(repo, pats, opts, extra):
478 478 s = repo.status(match=scmutil.match(repo[None], pats, opts), unknown=True)
479 479 if s.unknown:
480 480 extra[b'shelve_unknown'] = b'\0'.join(s.unknown)
481 481 repo[None].add(s.unknown)
482 482
483 483
484 484 def _finishshelve(repo, tr):
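    # with internal-phase support the shelved changeset is kept as a hidden
    # internal commit, so the transaction can simply be closed; otherwise the
    # temporary (secret) commit must not survive, so the transaction is
    # aborted, which strips it while preserving the dirstate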
485 485 if phases.supportinternal(repo):
486 486 tr.close()
487 487 else:
488 488 _aborttransaction(repo, tr)
489 489
490 490
491 491 def createcmd(ui, repo, pats, opts):
492 492 """subcommand that creates a new shelve"""
493 493 with repo.wlock():
494 494 cmdutil.checkunfinished(repo)
495 495 return _docreatecmd(ui, repo, pats, opts)
496 496
497 497
498 498 def _docreatecmd(ui, repo, pats, opts):
499 499 wctx = repo[None]
500 500 parents = wctx.parents()
501 501 parent = parents[0]
502 502 origbranch = wctx.branch()
503 503
504 504 if parent.node() != nodemod.nullid:
505 505 desc = b"changes to: %s" % parent.description().split(b'\n', 1)[0]
506 506 else:
507 507 desc = b'(changes in empty repository)'
508 508
509 509 if not opts.get(b'message'):
510 510 opts[b'message'] = desc
511 511
512 512 lock = tr = activebookmark = None
513 513 try:
514 514 lock = repo.lock()
515 515
516 516 # use an uncommitted transaction to generate the bundle to avoid
517 517 # pull races. ensure we don't print the abort message to stderr.
518 518 tr = repo.transaction(b'shelve', report=lambda x: None)
519 519
520 520 interactive = opts.get(b'interactive', False)
521 521 includeunknown = opts.get(b'unknown', False) and not opts.get(
522 522 b'addremove', False
523 523 )
524 524
525 525 name = getshelvename(repo, parent, opts)
526 526 activebookmark = _backupactivebookmark(repo)
527 527 extra = {b'internal': b'shelve'}
528 528 if includeunknown:
529 529 _includeunknownfiles(repo, pats, opts, extra)
530 530
531 531 if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
532 532             # In a non-bare shelve we don't store the newly created branch
533 533             # in the bundled commit
534 534 repo.dirstate.setbranch(repo[b'.'].branch())
535 535
536 536 commitfunc = getcommitfunc(extra, interactive, editor=True)
537 537 if not interactive:
538 538 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
539 539 else:
540 540 node = cmdutil.dorecord(
541 541 ui,
542 542 repo,
543 543 commitfunc,
544 544 None,
545 545 False,
546 546 cmdutil.recordfilter,
547 547 *pats,
548 548 **pycompat.strkwargs(opts)
549 549 )
550 550 if not node:
551 551 _nothingtoshelvemessaging(ui, repo, pats, opts)
552 552 return 1
553 553
554 554 # Create a matcher so that prefetch doesn't attempt to fetch
555 555 # the entire repository pointlessly, and as an optimisation
556 556 # for movedirstate, if needed.
557 557 match = scmutil.matchfiles(repo, repo[node].files())
558 558 _shelvecreatedcommit(repo, node, name, match)
559 559
560 560 ui.status(_(b'shelved as %s\n') % name)
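        # with --keep the changes stay in the working copy (only the dirstate
        # is moved back onto the parent); otherwise the working copy is reset
        # to the shelve's parent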
561 561 if opts[b'keep']:
562 562 with repo.dirstate.parentchange():
563 563 scmutil.movedirstate(repo, parent, match)
564 564 else:
565 565 hg.update(repo, parent.node())
566 566 if origbranch != repo[b'.'].branch() and not _isbareshelve(pats, opts):
567 567 repo.dirstate.setbranch(origbranch)
568 568
569 569 _finishshelve(repo, tr)
570 570 finally:
571 571 _restoreactivebookmark(repo, activebookmark)
572 572 lockmod.release(tr, lock)
573 573
574 574
575 575 def _isbareshelve(pats, opts):
576 576 return (
577 577 not pats
578 578 and not opts.get(b'interactive', False)
579 579 and not opts.get(b'include', False)
580 580 and not opts.get(b'exclude', False)
581 581 )
582 582
583 583
584 584 def _iswctxonnewbranch(repo):
585 585 return repo[None].branch() != repo[b'.'].branch()
586 586
587 587
588 588 def cleanupcmd(ui, repo):
589 589 """subcommand that deletes all shelves"""
590 590
591 591 with repo.wlock():
592 592 for (name, _type) in repo.vfs.readdir(shelvedir):
593 593 suffix = name.rsplit(b'.', 1)[-1]
594 594 if suffix in shelvefileextensions:
595 595 shelvedfile(repo, name).movetobackup()
596 596 cleanupoldbackups(repo)
597 597
598 598
599 599 def deletecmd(ui, repo, pats):
600 600 """subcommand that deletes a specific shelve"""
601 601 if not pats:
602 602 raise error.Abort(_(b'no shelved changes specified!'))
603 603 with repo.wlock():
604 604 for name in pats:
605 605 try:
606 606 for suffix in shelvefileextensions:
607 607 shfile = shelvedfile(repo, name, suffix)
608 608                 # the patch file is necessary, as it should
609 609                 # be present for any kind of shelve,
610 610                 # but the .hg file is optional as in the future we
611 611                 # will add an obsolete-based shelve which does not create a
612 612                 # bundle
613 613 if shfile.exists() or suffix == patchextension:
614 614 shfile.movetobackup()
615 615 except OSError as err:
616 616 if err.errno != errno.ENOENT:
617 617 raise
618 618 raise error.Abort(_(b"shelved change '%s' not found") % name)
619 619 cleanupoldbackups(repo)
620 620
621 621
622 622 def listshelves(repo):
623 623 """return all shelves in repo as list of (time, filename)"""
624 624 try:
625 625 names = repo.vfs.readdir(shelvedir)
626 626 except OSError as err:
627 627 if err.errno != errno.ENOENT:
628 628 raise
629 629 return []
630 630 info = []
631 631 for (name, _type) in names:
632 632 pfx, sfx = name.rsplit(b'.', 1)
633 633 if not pfx or sfx != patchextension:
634 634 continue
635 635 st = shelvedfile(repo, name).stat()
636 636 info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
637 637 return sorted(info, reverse=True)
638 638
639 639
640 640 def listcmd(ui, repo, pats, opts):
641 641 """subcommand that displays the list of shelves"""
642 642 pats = set(pats)
643 643 width = 80
644 644 if not ui.plain():
645 645 width = ui.termwidth()
646 646 namelabel = b'shelve.newest'
647 647 ui.pager(b'shelve')
648 648 for mtime, name in listshelves(repo):
649 649 sname = util.split(name)[1]
650 650 if pats and sname not in pats:
651 651 continue
652 652 ui.write(sname, label=namelabel)
653 653 namelabel = b'shelve.name'
654 654 if ui.quiet:
655 655 ui.write(b'\n')
656 656 continue
657 657 ui.write(b' ' * (16 - len(sname)))
658 658 used = 16
659 659 date = dateutil.makedate(mtime)
660 660 age = b'(%s)' % templatefilters.age(date, abbrev=True)
661 661 ui.write(age, label=b'shelve.age')
662 662 ui.write(b' ' * (12 - len(age)))
663 663 used += 12
664 664 with open(name + b'.' + patchextension, b'rb') as fp:
665 665 while True:
666 666 line = fp.readline()
667 667 if not line:
668 668 break
669 669 if not line.startswith(b'#'):
670 670 desc = line.rstrip()
671 671 if ui.formatted():
672 672 desc = stringutil.ellipsis(desc, width - used)
673 673 ui.write(desc)
674 674 break
675 675 ui.write(b'\n')
676 676 if not (opts[b'patch'] or opts[b'stat']):
677 677 continue
678 678 difflines = fp.readlines()
679 679 if opts[b'patch']:
680 680 for chunk, label in patch.difflabel(iter, difflines):
681 681 ui.write(chunk, label=label)
682 682 if opts[b'stat']:
683 683 for chunk, label in patch.diffstatui(difflines, width=width):
684 684 ui.write(chunk, label=label)
685 685
686 686
687 687 def patchcmds(ui, repo, pats, opts):
688 688 """subcommand that displays shelves"""
689 689 if len(pats) == 0:
690 690 shelves = listshelves(repo)
691 691 if not shelves:
692 692 raise error.Abort(_(b"there are no shelves to show"))
693 693 mtime, name = shelves[0]
694 694 sname = util.split(name)[1]
695 695 pats = [sname]
696 696
697 697 for shelfname in pats:
698 698 if not shelvedfile(repo, shelfname, patchextension).exists():
699 699 raise error.Abort(_(b"cannot find shelf %s") % shelfname)
700 700
701 701 listcmd(ui, repo, pats, opts)
702 702
703 703
704 704 def checkparents(repo, state):
705 705 """check parent while resuming an unshelve"""
706 706 if state.parents != repo.dirstate.parents():
707 707 raise error.Abort(
708 708 _(b'working directory parents do not match unshelve state')
709 709 )
710 710
711 711
712 712 def _loadshelvedstate(ui, repo, opts):
713 713 try:
714 714 state = shelvedstate.load(repo)
715 715 if opts.get(b'keep') is None:
716 716 opts[b'keep'] = state.keep
717 717 except IOError as err:
718 718 if err.errno != errno.ENOENT:
719 719 raise
720 720 cmdutil.wrongtooltocontinue(repo, _(b'unshelve'))
721 721 except error.CorruptedState as err:
722 722 ui.debug(pycompat.bytestr(err) + b'\n')
723 723 if opts.get(b'continue'):
724 724 msg = _(b'corrupted shelved state file')
725 725 hint = _(
726 726 b'please run hg unshelve --abort to abort unshelve '
727 727 b'operation'
728 728 )
729 729 raise error.Abort(msg, hint=hint)
730 730 elif opts.get(b'abort'):
731 731 shelvedstate.clear(repo)
732 732 raise error.Abort(
733 733 _(
734 734 b'could not read shelved state file, your '
735 735 b'working copy may be in an unexpected state\n'
736 736 b'please update to some commit\n'
737 737 )
738 738 )
739 739 return state
740 740
741 741
742 742 def unshelveabort(ui, repo, state):
743 743     """subcommand that aborts an in-progress unshelve"""
744 744 with repo.lock():
745 745 try:
746 746 checkparents(repo, state)
747 747
748 748 merge.clean_update(state.pendingctx)
749 749 if state.activebookmark and state.activebookmark in repo._bookmarks:
750 750 bookmarks.activate(repo, state.activebookmark)
751 751 mergefiles(ui, repo, state.wctx, state.pendingctx)
752 752 if not phases.supportinternal(repo):
753 753 repair.strip(
754 754 ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
755 755 )
756 756 finally:
757 757 shelvedstate.clear(repo)
758 758 ui.warn(_(b"unshelve of '%s' aborted\n") % state.name)
759 759
760 760
761 761 def hgabortunshelve(ui, repo):
762 762     """logic to abort unshelve using 'hg abort'"""
763 763 with repo.wlock():
764 764 state = _loadshelvedstate(ui, repo, {b'abort': True})
765 765 return unshelveabort(ui, repo, state)
766 766
767 767
768 768 def mergefiles(ui, repo, wctx, shelvectx):
769 769 """updates to wctx and merges the changes from shelvectx into the
770 770 dirstate."""
771 771 with ui.configoverride({(b'ui', b'quiet'): True}):
772 772 hg.update(repo, wctx.node())
773 773 ui.pushbuffer(True)
774 774 cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents())
775 775 ui.popbuffer()
776 776
777 777
778 778 def restorebranch(ui, repo, branchtorestore):
779 779 if branchtorestore and branchtorestore != repo.dirstate.branch():
780 780 repo.dirstate.setbranch(branchtorestore)
781 781 ui.status(
782 782 _(b'marked working directory as branch %s\n') % branchtorestore
783 783 )
784 784
785 785
786 786 def unshelvecleanup(ui, repo, name, opts):
787 787 """remove related files after an unshelve"""
788 788 if not opts.get(b'keep'):
789 789 for filetype in shelvefileextensions:
790 790 shfile = shelvedfile(repo, name, filetype)
791 791 if shfile.exists():
792 792 shfile.movetobackup()
793 793 cleanupoldbackups(repo)
794 794
795 795
796 796 def unshelvecontinue(ui, repo, state, opts):
797 797 """subcommand to continue an in-progress unshelve"""
798 798 # We're finishing off a merge. First parent is our original
799 799 # parent, second is the temporary "fake" commit we're unshelving.
800 800 interactive = state.interactive
801 801 basename = state.name
802 802 with repo.lock():
803 803 checkparents(repo, state)
804 804 ms = merge.mergestate.read(repo)
805 805 if list(ms.unresolved()):
806 806 raise error.Abort(
807 807 _(b"unresolved conflicts, can't continue"),
808 808 hint=_(b"see 'hg resolve', then 'hg unshelve --continue'"),
809 809 )
810 810
811 811 shelvectx = repo[state.parents[1]]
812 812 pendingctx = state.pendingctx
813 813
814 814 with repo.dirstate.parentchange():
815 815 repo.setparents(state.pendingctx.node(), nodemod.nullid)
816 816 repo.dirstate.write(repo.currenttransaction())
817 817
818 818 targetphase = phases.internal
819 819 if not phases.supportinternal(repo):
820 820 targetphase = phases.secret
821 821 overrides = {(b'phases', b'new-commit'): targetphase}
822 822 with repo.ui.configoverride(overrides, b'unshelve'):
823 823 with repo.dirstate.parentchange():
824 824 repo.setparents(state.parents[0], nodemod.nullid)
825 825 newnode, ispartialunshelve = _createunshelvectx(
826 826 ui, repo, shelvectx, basename, interactive, opts
827 827 )
828 828
829 829 if newnode is None:
830 830 # If it ended up being a no-op commit, then the normal
831 831 # merge state clean-up path doesn't happen, so do it
832 832 # here. Fix issue5494
833 833 merge.mergestate.clean(repo)
834 834 shelvectx = state.pendingctx
835 835 msg = _(
836 836 b'note: unshelved changes already existed '
837 837 b'in the working copy\n'
838 838 )
839 839 ui.status(msg)
840 840 else:
841 841 # only strip the shelvectx if we produced one
842 842 state.nodestoremove.append(newnode)
843 843 shelvectx = repo[newnode]
844 844
845 845 hg.updaterepo(repo, pendingctx.node(), overwrite=False)
846 846 mergefiles(ui, repo, state.wctx, shelvectx)
847 847 restorebranch(ui, repo, state.branchtorestore)
848 848
849 849 if not phases.supportinternal(repo):
850 850 repair.strip(
851 851 ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
852 852 )
853 853 shelvedstate.clear(repo)
854 854 if not ispartialunshelve:
855 855 unshelvecleanup(ui, repo, state.name, opts)
856 856 _restoreactivebookmark(repo, state.activebookmark)
857 857 ui.status(_(b"unshelve of '%s' complete\n") % state.name)
858 858
859 859
860 860 def hgcontinueunshelve(ui, repo):
861 861 """logic to resume unshelve using 'hg continue'"""
862 862 with repo.wlock():
863 863 state = _loadshelvedstate(ui, repo, {b'continue': True})
864 864 return unshelvecontinue(ui, repo, state, {b'keep': state.keep})
865 865
866 866
867 867 def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
868 868 """Temporarily commit working copy changes before moving unshelve commit"""
869 869     # Store pending changes in a commit and remember which files were added,
870 870     # in case a shelve contains unknown files that are part of the pending change
871 871 s = repo.status()
872 872 addedbefore = frozenset(s.added)
873 873 if not (s.modified or s.added or s.removed):
874 874 return tmpwctx, addedbefore
875 875 ui.status(
876 876 _(
877 877 b"temporarily committing pending changes "
878 878 b"(restore with 'hg unshelve --abort')\n"
879 879 )
880 880 )
881 881 extra = {b'internal': b'shelve'}
882 882 commitfunc = getcommitfunc(extra=extra, interactive=False, editor=False)
883 883 tempopts = {}
884 884 tempopts[b'message'] = b"pending changes temporary commit"
885 885 tempopts[b'date'] = opts.get(b'date')
886 886 with ui.configoverride({(b'ui', b'quiet'): True}):
887 887 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
888 888 tmpwctx = repo[node]
889 889 return tmpwctx, addedbefore
890 890
891 891
892 892 def _unshelverestorecommit(ui, repo, tr, basename):
893 893 """Recreate commit in the repository during the unshelve"""
894 894 repo = repo.unfiltered()
895 895 node = None
896 896 if shelvedfile(repo, basename, b'shelve').exists():
897 897 node = shelvedfile(repo, basename, b'shelve').readinfo()[b'node']
898 898 if node is None or node not in repo:
899 899 with ui.configoverride({(b'ui', b'quiet'): True}):
900 900 shelvectx = shelvedfile(repo, basename, b'hg').applybundle(tr)
901 901 # We might not strip the unbundled changeset, so we should keep track of
902 902 # the unshelve node in case we need to reuse it (eg: unshelve --keep)
903 903 if node is None:
904 904 info = {b'node': nodemod.hex(shelvectx.node())}
905 905 shelvedfile(repo, basename, b'shelve').writeinfo(info)
906 906 else:
907 907 shelvectx = repo[node]
908 908
909 909 return repo, shelvectx
910 910
911 911
912 912 def _createunshelvectx(ui, repo, shelvectx, basename, interactive, opts):
913 913     """Handles the creation of the unshelve commit and updates the shelve if it
914 914     was partially unshelved.
915 915
916 916 If interactive is:
917 917
918 918 * False: Commits all the changes in the working directory.
919 919 * True: Prompts the user to select changes to unshelve and commit them.
920 920             Updates the shelve with the remaining changes.
921 921
922 922     Returns the node of the new commit formed and a bool indicating whether
923 923     the shelve was partially unshelved. Creates a commit ctx to unshelve
924 924     interactively or non-interactively.
925 925
926 926     In interactive mode the user might want to unshelve only some of the
927 927     stored changes, so we create two commits: one with the changes requested
928 928     for unshelving now, and one that reshelves the rest for the future.
929 929
930 930     Here, we return both the newnode that was created and a bool indicating
931 931     whether the shelve was partly or completely unshelved.
932 932 """
933 933 opts[b'message'] = shelvectx.description()
934 934 opts[b'interactive-unshelve'] = True
935 935 pats = []
936 936 if not interactive:
937 937 newnode = repo.commit(
938 938 text=shelvectx.description(),
939 939 extra=shelvectx.extra(),
940 940 user=shelvectx.user(),
941 941 date=shelvectx.date(),
942 942 )
943 943 return newnode, False
944 944
945 945 commitfunc = getcommitfunc(shelvectx.extra(), interactive=True, editor=True)
946 946 newnode = cmdutil.dorecord(
947 947 ui,
948 948 repo,
949 949 commitfunc,
950 950 None,
951 951 False,
952 952 cmdutil.recordfilter,
953 953 *pats,
954 954 **pycompat.strkwargs(opts)
955 955 )
956 956 snode = repo.commit(
957 957 text=shelvectx.description(),
958 958 extra=shelvectx.extra(),
959 959 user=shelvectx.user(),
960 960 )
961 961 if snode:
962 962 m = scmutil.matchfiles(repo, repo[snode].files())
963 963 _shelvecreatedcommit(repo, snode, basename, m)
964 964
965 965 return newnode, bool(snode)
966 966
967 967
968 968 def _rebaserestoredcommit(
969 969 ui,
970 970 repo,
971 971 opts,
972 972 tr,
973 973 oldtiprev,
974 974 basename,
975 975 pctx,
976 976 tmpwctx,
977 977 shelvectx,
978 978 branchtorestore,
979 979 activebookmark,
980 980 ):
981 981 """Rebase restored commit from its original location to a destination"""
982 982 # If the shelve is not immediately on top of the commit
983 983 # we'll be merging with, rebase it to be on top.
984 984 interactive = opts.get(b'interactive')
985 985 if tmpwctx.node() == shelvectx.p1().node() and not interactive:
986 986         # We won't skip in interactive mode because the user might want to
987 987         # unshelve only certain changes.
988 988 return shelvectx, False
989 989
990 990 overrides = {
991 991 (b'ui', b'forcemerge'): opts.get(b'tool', b''),
992 992 (b'phases', b'new-commit'): phases.secret,
993 993 }
994 994 with repo.ui.configoverride(overrides, b'unshelve'):
995 995 ui.status(_(b'rebasing shelved changes\n'))
996 996 stats = merge.graft(
997 997 repo,
998 998 shelvectx,
999 labels=[b'shelve', b'working-copy'],
999 labels=[b'working-copy', b'shelve'],
1000 1000 keepconflictparent=True,
1001 1001 )
1002 1002 if stats.unresolvedcount:
1003 1003 tr.close()
1004 1004
1005 1005 nodestoremove = [
1006 1006 repo.changelog.node(rev)
1007 1007 for rev in pycompat.xrange(oldtiprev, len(repo))
1008 1008 ]
1009 1009 shelvedstate.save(
1010 1010 repo,
1011 1011 basename,
1012 1012 pctx,
1013 1013 tmpwctx,
1014 1014 nodestoremove,
1015 1015 branchtorestore,
1016 1016 opts.get(b'keep'),
1017 1017 activebookmark,
1018 1018 interactive,
1019 1019 )
1020 1020 raise error.InterventionRequired(
1021 1021 _(
1022 1022 b"unresolved conflicts (see 'hg resolve', then "
1023 1023 b"'hg unshelve --continue')"
1024 1024 )
1025 1025 )
1026 1026
1027 1027 with repo.dirstate.parentchange():
1028 1028 repo.setparents(tmpwctx.node(), nodemod.nullid)
1029 1029 newnode, ispartialunshelve = _createunshelvectx(
1030 1030 ui, repo, shelvectx, basename, interactive, opts
1031 1031 )
1032 1032
1033 1033 if newnode is None:
1034 1034 # If it ended up being a no-op commit, then the normal
1035 1035 # merge state clean-up path doesn't happen, so do it
1036 1036 # here. Fix issue5494
1037 1037 merge.mergestate.clean(repo)
1038 1038 shelvectx = tmpwctx
1039 1039 msg = _(
1040 1040 b'note: unshelved changes already existed '
1041 1041 b'in the working copy\n'
1042 1042 )
1043 1043 ui.status(msg)
1044 1044 else:
1045 1045 shelvectx = repo[newnode]
1046 1046 hg.updaterepo(repo, tmpwctx.node(), False)
1047 1047
1048 1048 return shelvectx, ispartialunshelve
1049 1049
1050 1050
1051 1051 def _forgetunknownfiles(repo, shelvectx, addedbefore):
1052 1052 # Forget any files that were unknown before the shelve, unknown before
1053 1053 # unshelve started, but are now added.
1054 1054 shelveunknown = shelvectx.extra().get(b'shelve_unknown')
1055 1055 if not shelveunknown:
1056 1056 return
1057 1057 shelveunknown = frozenset(shelveunknown.split(b'\0'))
1058 1058 addedafter = frozenset(repo.status().added)
1059 1059 toforget = (addedafter & shelveunknown) - addedbefore
1060 1060 repo[None].forget(toforget)
1061 1061
1062 1062
1063 1063 def _finishunshelve(repo, oldtiprev, tr, activebookmark):
1064 1064 _restoreactivebookmark(repo, activebookmark)
1065 1065     # Aborting the transaction will strip all the commits for us,
1066 1066     # but it doesn't update the in-memory structures, so addchangegroup
1067 1067 # hooks still fire and try to operate on the missing commits.
1068 1068 # Clean up manually to prevent this.
1069 1069 repo.unfiltered().changelog.strip(oldtiprev, tr)
1070 1070 _aborttransaction(repo, tr)
1071 1071
1072 1072
1073 1073 def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
1074 1074 """Check potential problems which may result from working
1075 1075 copy having untracked changes."""
1076 1076 wcdeleted = set(repo.status().deleted)
1077 1077 shelvetouched = set(shelvectx.files())
1078 1078 intersection = wcdeleted.intersection(shelvetouched)
1079 1079 if intersection:
1080 1080 m = _(b"shelved change touches missing files")
1081 1081 hint = _(b"run hg status to see which files are missing")
1082 1082 raise error.Abort(m, hint=hint)
1083 1083
1084 1084
1085 1085 def dounshelve(ui, repo, *shelved, **opts):
1086 1086 opts = pycompat.byteskwargs(opts)
1087 1087 abortf = opts.get(b'abort')
1088 1088 continuef = opts.get(b'continue')
1089 1089 interactive = opts.get(b'interactive')
1090 1090 if not abortf and not continuef:
1091 1091 cmdutil.checkunfinished(repo)
1092 1092 shelved = list(shelved)
1093 1093 if opts.get(b"name"):
1094 1094 shelved.append(opts[b"name"])
1095 1095
1096 1096 if interactive and opts.get(b'keep'):
1097 1097 raise error.Abort(_(b'--keep on --interactive is not yet supported'))
1098 1098 if abortf or continuef:
1099 1099 if abortf and continuef:
1100 1100 raise error.Abort(_(b'cannot use both abort and continue'))
1101 1101 if shelved:
1102 1102 raise error.Abort(
1103 1103 _(
1104 1104 b'cannot combine abort/continue with '
1105 1105 b'naming a shelved change'
1106 1106 )
1107 1107 )
1108 1108 if abortf and opts.get(b'tool', False):
1109 1109 ui.warn(_(b'tool option will be ignored\n'))
1110 1110
1111 1111 state = _loadshelvedstate(ui, repo, opts)
1112 1112 if abortf:
1113 1113 return unshelveabort(ui, repo, state)
1114 1114 elif continuef and interactive:
1115 1115 raise error.Abort(_(b'cannot use both continue and interactive'))
1116 1116 elif continuef:
1117 1117 return unshelvecontinue(ui, repo, state, opts)
1118 1118 elif len(shelved) > 1:
1119 1119 raise error.Abort(_(b'can only unshelve one change at a time'))
1120 1120 elif not shelved:
1121 1121 shelved = listshelves(repo)
1122 1122 if not shelved:
1123 1123 raise error.Abort(_(b'no shelved changes to apply!'))
1124 1124 basename = util.split(shelved[0][1])[1]
1125 1125 ui.status(_(b"unshelving change '%s'\n") % basename)
1126 1126 else:
1127 1127 basename = shelved[0]
1128 1128
1129 1129 if not shelvedfile(repo, basename, patchextension).exists():
1130 1130 raise error.Abort(_(b"shelved change '%s' not found") % basename)
1131 1131
1132 1132 repo = repo.unfiltered()
1133 1133 lock = tr = None
1134 1134 try:
1135 1135 lock = repo.lock()
1136 1136 tr = repo.transaction(b'unshelve', report=lambda x: None)
1137 1137 oldtiprev = len(repo)
1138 1138
1139 1139 pctx = repo[b'.']
1140 1140 tmpwctx = pctx
1141 1141 # The goal is to have a commit structure like so:
1142 1142 # ...-> pctx -> tmpwctx -> shelvectx
1143 1143 # where tmpwctx is an optional commit with the user's pending changes
1144 1144 # and shelvectx is the unshelved changes. Then we merge it all down
1145 1145 # to the original pctx.
1146 1146
1147 1147 activebookmark = _backupactivebookmark(repo)
1148 1148 tmpwctx, addedbefore = _commitworkingcopychanges(
1149 1149 ui, repo, opts, tmpwctx
1150 1150 )
1151 1151 repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
1152 1152 _checkunshelveuntrackedproblems(ui, repo, shelvectx)
1153 1153 branchtorestore = b''
1154 1154 if shelvectx.branch() != shelvectx.p1().branch():
1155 1155 branchtorestore = shelvectx.branch()
1156 1156
1157 1157 shelvectx, ispartialunshelve = _rebaserestoredcommit(
1158 1158 ui,
1159 1159 repo,
1160 1160 opts,
1161 1161 tr,
1162 1162 oldtiprev,
1163 1163 basename,
1164 1164 pctx,
1165 1165 tmpwctx,
1166 1166 shelvectx,
1167 1167 branchtorestore,
1168 1168 activebookmark,
1169 1169 )
1170 1170 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
1171 1171 with ui.configoverride(overrides, b'unshelve'):
1172 1172 mergefiles(ui, repo, pctx, shelvectx)
1173 1173 restorebranch(ui, repo, branchtorestore)
1174 1174 shelvedstate.clear(repo)
1175 1175 _finishunshelve(repo, oldtiprev, tr, activebookmark)
1176 1176 _forgetunknownfiles(repo, shelvectx, addedbefore)
1177 1177 if not ispartialunshelve:
1178 1178 unshelvecleanup(ui, repo, basename, opts)
1179 1179 finally:
1180 1180 if tr:
1181 1181 tr.release()
1182 1182 lockmod.release(lock)
@@ -1,1106 +1,1111 b''
1 1 #if windows
2 2 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
3 3 #else
4 4 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
5 5 #endif
6 6 $ export PYTHONPATH
7 7
8 8 a typical client does not want echo-back messages, so test without them:
9 9
10 10 $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
11 11 $ mv $HGRCPATH.new $HGRCPATH
12 12
13 13 $ hg init repo
14 14 $ cd repo
15 15
16 16 >>> from __future__ import absolute_import
17 17 >>> import os
18 18 >>> import sys
19 19 >>> from hgclient import bprint, check, readchannel, runcommand
20 20 >>> @check
21 21 ... def hellomessage(server):
22 22 ... ch, data = readchannel(server)
23 23 ... bprint(b'%c, %r' % (ch, data))
24 24 ... # run an arbitrary command to make sure the next thing the server
25 25 ... # sends isn't part of the hello message
26 26 ... runcommand(server, [b'id'])
27 27 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
28 28 *** runcommand id
29 29 000000000000 tip
30 30
31 31 >>> from hgclient import check
32 32 >>> @check
33 33 ... def unknowncommand(server):
34 34 ... server.stdin.write(b'unknowncommand\n')
35 35 abort: unknown command unknowncommand
36 36
37 37 >>> from hgclient import check, readchannel, runcommand
38 38 >>> @check
39 39 ... def checkruncommand(server):
40 40 ... # hello block
41 41 ... readchannel(server)
42 42 ...
43 43 ... # no args
44 44 ... runcommand(server, [])
45 45 ...
46 46 ... # global options
47 47 ... runcommand(server, [b'id', b'--quiet'])
48 48 ...
49 49 ... # make sure global options don't stick through requests
50 50 ... runcommand(server, [b'id'])
51 51 ...
52 52 ... # --config
53 53 ... runcommand(server, [b'id', b'--config', b'ui.quiet=True'])
54 54 ...
55 55 ... # make sure --config doesn't stick
56 56 ... runcommand(server, [b'id'])
57 57 ...
58 58 ... # negative return code should be masked
59 59 ... runcommand(server, [b'id', b'-runknown'])
60 60 *** runcommand
61 61 Mercurial Distributed SCM
62 62
63 63 basic commands:
64 64
65 65 add add the specified files on the next commit
66 66 annotate show changeset information by line for each file
67 67 clone make a copy of an existing repository
68 68 commit commit the specified files or all outstanding changes
69 69 diff diff repository (or selected files)
70 70 export dump the header and diffs for one or more changesets
71 71 forget forget the specified files on the next commit
72 72 init create a new repository in the given directory
73 73 log show revision history of entire repository or files
74 74 merge merge another revision into working directory
75 75 pull pull changes from the specified source
76 76 push push changes to the specified destination
77 77 remove remove the specified files on the next commit
78 78 serve start stand-alone webserver
79 79 status show changed files in the working directory
80 80 summary summarize working directory state
81 81 update update working directory (or switch revisions)
82 82
83 83 (use 'hg help' for the full list of commands or 'hg -v' for details)
84 84 *** runcommand id --quiet
85 85 000000000000
86 86 *** runcommand id
87 87 000000000000 tip
88 88 *** runcommand id --config ui.quiet=True
89 89 000000000000
90 90 *** runcommand id
91 91 000000000000 tip
92 92 *** runcommand id -runknown
93 93 abort: unknown revision 'unknown'!
94 94 [255]
95 95
96 96 >>> from hgclient import bprint, check, readchannel
97 97 >>> @check
98 98 ... def inputeof(server):
99 99 ... readchannel(server)
100 100 ... server.stdin.write(b'runcommand\n')
101 101 ... # close stdin while server is waiting for input
102 102 ... server.stdin.close()
103 103 ...
104 104 ... # server exits with 1 if the pipe closed while reading the command
105 105 ... bprint(b'server exit code =', b'%d' % server.wait())
106 106 server exit code = 1
107 107
108 108 >>> from hgclient import check, readchannel, runcommand, stringio
109 109 >>> @check
110 110 ... def serverinput(server):
111 111 ... readchannel(server)
112 112 ...
113 113 ... patch = b"""
114 114 ... # HG changeset patch
115 115 ... # User test
116 116 ... # Date 0 0
117 117 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857
118 118 ... # Parent 0000000000000000000000000000000000000000
119 119 ... 1
120 120 ...
121 121 ... diff -r 000000000000 -r c103a3dec114 a
122 122 ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000
123 123 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000
124 124 ... @@ -0,0 +1,1 @@
125 125 ... +1
126 126 ... """
127 127 ...
128 128 ... runcommand(server, [b'import', b'-'], input=stringio(patch))
129 129 ... runcommand(server, [b'log'])
130 130 *** runcommand import -
131 131 applying patch from stdin
132 132 *** runcommand log
133 133 changeset: 0:eff892de26ec
134 134 tag: tip
135 135 user: test
136 136 date: Thu Jan 01 00:00:00 1970 +0000
137 137 summary: 1
138 138
139 139
140 140 check strict parsing of early options:
141 141
142 142 >>> import os
143 143 >>> from hgclient import check, readchannel, runcommand
144 144 >>> os.environ['HGPLAIN'] = '+strictflags'
145 145 >>> @check
146 146 ... def cwd(server):
147 147 ... readchannel(server)
148 148 ... runcommand(server, [b'log', b'-b', b'--config=alias.log=!echo pwned',
149 149 ... b'default'])
150 150 *** runcommand log -b --config=alias.log=!echo pwned default
151 151 abort: unknown revision '--config=alias.log=!echo pwned'!
152 152 [255]
153 153
154 154 check that "histedit --commands=-" can read rules from the input channel:
155 155
156 156 >>> from hgclient import check, readchannel, runcommand, stringio
157 157 >>> @check
158 158 ... def serverinput(server):
159 159 ... readchannel(server)
160 160 ... rules = b'pick eff892de26ec\n'
161 161 ... runcommand(server, [b'histedit', b'0', b'--commands=-',
162 162 ... b'--config', b'extensions.histedit='],
163 163 ... input=stringio(rules))
164 164 *** runcommand histedit 0 --commands=- --config extensions.histedit=
165 165
166 166 check that --cwd doesn't persist between requests:
167 167
168 168 $ mkdir foo
169 169 $ touch foo/bar
170 170 >>> from hgclient import check, readchannel, runcommand
171 171 >>> @check
172 172 ... def cwd(server):
173 173 ... readchannel(server)
174 174 ... runcommand(server, [b'--cwd', b'foo', b'st', b'bar'])
175 175 ... runcommand(server, [b'st', b'foo/bar'])
176 176 *** runcommand --cwd foo st bar
177 177 ? bar
178 178 *** runcommand st foo/bar
179 179 ? foo/bar
180 180
181 181 $ rm foo/bar
182 182
183 183
184 184 check that local configs for the cached repo aren't inherited when -R is used:
185 185
186 186 $ cat <<EOF >> .hg/hgrc
187 187 > [ui]
188 188 > foo = bar
189 189 > EOF
190 190
191 191 #if no-extraextensions
192 192
193 193 >>> from hgclient import check, readchannel, runcommand, sep
194 194 >>> @check
195 195 ... def localhgrc(server):
196 196 ... readchannel(server)
197 197 ...
198 198 ... # the cached repo local hgrc contains ui.foo=bar, so showconfig should
199 199 ... # show it
200 200 ... runcommand(server, [b'showconfig'], outfilter=sep)
201 201 ...
202 202 ... # but not for this repo
203 203 ... runcommand(server, [b'init', b'foo'])
204 204 ... runcommand(server, [b'-R', b'foo', b'showconfig', b'ui', b'defaults'])
205 205 *** runcommand showconfig
206 206 bundle.mainreporoot=$TESTTMP/repo
207 207 devel.all-warnings=true
208 208 devel.default-date=0 0
209 209 extensions.fsmonitor= (fsmonitor !)
210 210 largefiles.usercache=$TESTTMP/.cache/largefiles
211 211 lfs.usercache=$TESTTMP/.cache/lfs
212 212 ui.slash=True
213 213 ui.interactive=False
214 214 ui.merge=internal:merge
215 215 ui.mergemarkers=detailed
216 216 ui.foo=bar
217 217 ui.nontty=true
218 218 web.address=localhost
219 219 web\.ipv6=(?:True|False) (re)
220 220 web.server-header=testing stub value
221 221 *** runcommand init foo
222 222 *** runcommand -R foo showconfig ui defaults
223 223 ui.slash=True
224 224 ui.interactive=False
225 225 ui.merge=internal:merge
226 226 ui.mergemarkers=detailed
227 227 ui.nontty=true
228 228 #endif
229 229
230 230 $ rm -R foo
231 231
232 232 #if windows
233 233 $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH"
234 234 #else
235 235 $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH"
236 236 #endif
237 237
238 238 $ cat <<EOF > hook.py
239 239 > import sys
240 240 > from hgclient import bprint
241 241 > def hook(**args):
242 242 > bprint(b'hook talking')
243 243 > bprint(b'now try to read something: %r' % sys.stdin.read())
244 244 > EOF
245 245
246 246 >>> from hgclient import check, readchannel, runcommand, stringio
247 247 >>> @check
248 248 ... def hookoutput(server):
249 249 ... readchannel(server)
250 250 ... runcommand(server, [b'--config',
251 251 ... b'hooks.pre-identify=python:hook.hook',
252 252 ... b'id'],
253 253 ... input=stringio(b'some input'))
254 254 *** runcommand --config hooks.pre-identify=python:hook.hook id
255 255 eff892de26ec tip
256 256 hook talking
257 257 now try to read something: ''
258 258
259 259 Clean up the hook's cached version
260 260 $ rm hook.py*
261 261 $ rm -Rf __pycache__
262 262
263 263 $ echo a >> a
264 264 >>> import os
265 265 >>> from hgclient import check, readchannel, runcommand
266 266 >>> @check
267 267 ... def outsidechanges(server):
268 268 ... readchannel(server)
269 269 ... runcommand(server, [b'status'])
270 270 ... os.system('hg ci -Am2')
271 271 ... runcommand(server, [b'tip'])
272 272 ... runcommand(server, [b'status'])
273 273 *** runcommand status
274 274 M a
275 275 *** runcommand tip
276 276 changeset: 1:d3a0a68be6de
277 277 tag: tip
278 278 user: test
279 279 date: Thu Jan 01 00:00:00 1970 +0000
280 280 summary: 2
281 281
282 282 *** runcommand status
283 283
284 284 >>> import os
285 285 >>> from hgclient import bprint, check, readchannel, runcommand
286 286 >>> @check
287 287 ... def bookmarks(server):
288 288 ... readchannel(server)
289 289 ... runcommand(server, [b'bookmarks'])
290 290 ...
291 291 ... # changes .hg/bookmarks
292 292 ... os.system('hg bookmark -i bm1')
293 293 ... os.system('hg bookmark -i bm2')
294 294 ... runcommand(server, [b'bookmarks'])
295 295 ...
296 296 ... # changes .hg/bookmarks.current
297 297 ... os.system('hg upd bm1 -q')
298 298 ... runcommand(server, [b'bookmarks'])
299 299 ...
300 300 ... runcommand(server, [b'bookmarks', b'bm3'])
301 301 ... f = open('a', 'ab')
302 302 ... f.write(b'a\n') and None
303 303 ... f.close()
304 304 ... runcommand(server, [b'commit', b'-Amm'])
305 305 ... runcommand(server, [b'bookmarks'])
306 306 ... bprint(b'')
307 307 *** runcommand bookmarks
308 308 no bookmarks set
309 309 *** runcommand bookmarks
310 310 bm1 1:d3a0a68be6de
311 311 bm2 1:d3a0a68be6de
312 312 *** runcommand bookmarks
313 313 * bm1 1:d3a0a68be6de
314 314 bm2 1:d3a0a68be6de
315 315 *** runcommand bookmarks bm3
316 316 *** runcommand commit -Amm
317 317 *** runcommand bookmarks
318 318 bm1 1:d3a0a68be6de
319 319 bm2 1:d3a0a68be6de
320 320 * bm3 2:aef17e88f5f0
321 321
322 322
323 323 >>> import os
324 324 >>> from hgclient import check, readchannel, runcommand
325 325 >>> @check
326 326 ... def tagscache(server):
327 327 ... readchannel(server)
328 328 ... runcommand(server, [b'id', b'-t', b'-r', b'0'])
329 329 ... os.system('hg tag -r 0 foo')
330 330 ... runcommand(server, [b'id', b'-t', b'-r', b'0'])
331 331 *** runcommand id -t -r 0
332 332
333 333 *** runcommand id -t -r 0
334 334 foo
335 335
336 336 >>> import os
337 337 >>> from hgclient import check, readchannel, runcommand
338 338 >>> @check
339 339 ... def setphase(server):
340 340 ... readchannel(server)
341 341 ... runcommand(server, [b'phase', b'-r', b'.'])
342 342 ... os.system('hg phase -r . -p')
343 343 ... runcommand(server, [b'phase', b'-r', b'.'])
344 344 *** runcommand phase -r .
345 345 3: draft
346 346 *** runcommand phase -r .
347 347 3: public
348 348
349 349 $ echo a >> a
350 350 >>> from hgclient import bprint, check, readchannel, runcommand
351 351 >>> @check
352 352 ... def rollback(server):
353 353 ... readchannel(server)
354 354 ... runcommand(server, [b'phase', b'-r', b'.', b'-p'])
355 355 ... runcommand(server, [b'commit', b'-Am.'])
356 356 ... runcommand(server, [b'rollback'])
357 357 ... runcommand(server, [b'phase', b'-r', b'.'])
358 358 ... bprint(b'')
359 359 *** runcommand phase -r . -p
360 360 no phases changed
361 361 *** runcommand commit -Am.
362 362 *** runcommand rollback
363 363 repository tip rolled back to revision 3 (undo commit)
364 364 working directory now based on revision 3
365 365 *** runcommand phase -r .
366 366 3: public
367 367
368 368
369 369 >>> import os
370 370 >>> from hgclient import check, readchannel, runcommand
371 371 >>> @check
372 372 ... def branch(server):
373 373 ... readchannel(server)
374 374 ... runcommand(server, [b'branch'])
375 375 ... os.system('hg branch foo')
376 376 ... runcommand(server, [b'branch'])
377 377 ... os.system('hg branch default')
378 378 *** runcommand branch
379 379 default
380 380 marked working directory as branch foo
381 381 (branches are permanent and global, did you want a bookmark?)
382 382 *** runcommand branch
383 383 foo
384 384 marked working directory as branch default
385 385 (branches are permanent and global, did you want a bookmark?)
386 386
387 387 $ touch .hgignore
388 388 >>> import os
389 389 >>> from hgclient import bprint, check, readchannel, runcommand
390 390 >>> @check
391 391 ... def hgignore(server):
392 392 ... readchannel(server)
393 393 ... runcommand(server, [b'commit', b'-Am.'])
394 394 ... f = open('ignored-file', 'ab')
395 395 ... f.write(b'') and None
396 396 ... f.close()
397 397 ... f = open('.hgignore', 'ab')
398 398 ... f.write(b'ignored-file')
399 399 ... f.close()
400 400 ... runcommand(server, [b'status', b'-i', b'-u'])
401 401 ... bprint(b'')
402 402 *** runcommand commit -Am.
403 403 adding .hgignore
404 404 *** runcommand status -i -u
405 405 I ignored-file
406 406
407 407
408 408 cache of non-public revisions should be invalidated on repository change
409 409 (issue4855):
410 410
411 411 >>> import os
412 412 >>> from hgclient import bprint, check, readchannel, runcommand
413 413 >>> @check
414 414 ... def phasesetscacheaftercommit(server):
415 415 ... readchannel(server)
416 416 ... # load _phasecache._phaserevs and _phasesets
417 417 ... runcommand(server, [b'log', b'-qr', b'draft()'])
418 418 ... # create draft commits by another process
419 419 ... for i in range(5, 7):
420 420 ... f = open('a', 'ab')
421 421 ... f.seek(0, os.SEEK_END)
422 422 ... f.write(b'a\n') and None
423 423 ... f.close()
424 424 ... os.system('hg commit -Aqm%d' % i)
425 425 ... # new commits should be listed as draft revisions
426 426 ... runcommand(server, [b'log', b'-qr', b'draft()'])
427 427 ... bprint(b'')
428 428 *** runcommand log -qr draft()
429 429 4:7966c8e3734d
430 430 *** runcommand log -qr draft()
431 431 4:7966c8e3734d
432 432 5:41f6602d1c4f
433 433 6:10501e202c35
434 434
435 435
436 436 >>> import os
437 437 >>> from hgclient import bprint, check, readchannel, runcommand
438 438 >>> @check
439 439 ... def phasesetscacheafterstrip(server):
440 440 ... readchannel(server)
441 441 ... # load _phasecache._phaserevs and _phasesets
442 442 ... runcommand(server, [b'log', b'-qr', b'draft()'])
443 443 ... # strip cached revisions by another process
444 444 ... os.system('hg --config extensions.strip= strip -q 5')
445 445 ... # shouldn't abort by "unknown revision '6'"
446 446 ... runcommand(server, [b'log', b'-qr', b'draft()'])
447 447 ... bprint(b'')
448 448 *** runcommand log -qr draft()
449 449 4:7966c8e3734d
450 450 5:41f6602d1c4f
451 451 6:10501e202c35
452 452 *** runcommand log -qr draft()
453 453 4:7966c8e3734d
454 454
455 455
456 456 cache of phase roots should be invalidated on strip (issue3827):
457 457
458 458 >>> import os
459 459 >>> from hgclient import check, readchannel, runcommand, sep
460 460 >>> @check
461 461 ... def phasecacheafterstrip(server):
462 462 ... readchannel(server)
463 463 ...
464 464 ... # create new head, 5:731265503d86
465 465 ... runcommand(server, [b'update', b'-C', b'0'])
466 466 ... f = open('a', 'ab')
467 467 ... f.write(b'a\n') and None
468 468 ... f.close()
469 469 ... runcommand(server, [b'commit', b'-Am.', b'a'])
470 470 ... runcommand(server, [b'log', b'-Gq'])
471 471 ...
472 472 ... # make it public; draft marker moves to 4:7966c8e3734d
473 473 ... runcommand(server, [b'phase', b'-p', b'.'])
474 474 ... # load _phasecache.phaseroots
475 475 ... runcommand(server, [b'phase', b'.'], outfilter=sep)
476 476 ...
477 477 ... # strip 1::4 outside server
478 478 ... os.system('hg -q --config extensions.mq= strip 1')
479 479 ...
480 480 ... # shouldn't raise "7966c8e3734d: no node!"
481 481 ... runcommand(server, [b'branches'])
482 482 *** runcommand update -C 0
483 483 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
484 484 (leaving bookmark bm3)
485 485 *** runcommand commit -Am. a
486 486 created new head
487 487 *** runcommand log -Gq
488 488 @ 5:731265503d86
489 489 |
490 490 | o 4:7966c8e3734d
491 491 | |
492 492 | o 3:b9b85890c400
493 493 | |
494 494 | o 2:aef17e88f5f0
495 495 | |
496 496 | o 1:d3a0a68be6de
497 497 |/
498 498 o 0:eff892de26ec
499 499
500 500 *** runcommand phase -p .
501 501 *** runcommand phase .
502 502 5: public
503 503 *** runcommand branches
504 504 default 1:731265503d86
505 505
506 506 in-memory cache must be reloaded if the transaction is aborted, otherwise
507 507 the changelog and manifest would have invalid nodes:
508 508
509 509 $ echo a >> a
510 510 >>> from hgclient import check, readchannel, runcommand
511 511 >>> @check
512 512 ... def txabort(server):
513 513 ... readchannel(server)
514 514 ... runcommand(server, [b'commit', b'--config', b'hooks.pretxncommit=false',
515 515 ... b'-mfoo'])
516 516 ... runcommand(server, [b'verify'])
517 517 *** runcommand commit --config hooks.pretxncommit=false -mfoo
518 518 transaction abort!
519 519 rollback completed
520 520 abort: pretxncommit hook exited with status 1
521 521 [255]
522 522 *** runcommand verify
523 523 checking changesets
524 524 checking manifests
525 525 crosschecking files in changesets and manifests
526 526 checking files
527 527 checked 2 changesets with 2 changes to 1 files
528 528 $ hg revert --no-backup -aq
529 529
530 530 $ cat >> .hg/hgrc << EOF
531 531 > [experimental]
532 532 > evolution.createmarkers=True
533 533 > EOF
534 534
535 535 >>> import os
536 536 >>> from hgclient import check, readchannel, runcommand
537 537 >>> @check
538 538 ... def obsolete(server):
539 539 ... readchannel(server)
540 540 ...
541 541 ... runcommand(server, [b'up', b'null'])
542 542 ... runcommand(server, [b'phase', b'-df', b'tip'])
543 543 ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
544 544 ... if os.name == 'nt':
545 545 ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
546 546 ... os.system(cmd)
547 547 ... runcommand(server, [b'log', b'--hidden'])
548 548 ... runcommand(server, [b'log'])
549 549 *** runcommand up null
550 550 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
551 551 *** runcommand phase -df tip
552 552 1 new obsolescence markers
553 553 obsoleted 1 changesets
554 554 *** runcommand log --hidden
555 555 changeset: 1:731265503d86
556 556 tag: tip
557 557 user: test
558 558 date: Thu Jan 01 00:00:00 1970 +0000
559 559 obsolete: pruned
560 560 summary: .
561 561
562 562 changeset: 0:eff892de26ec
563 563 bookmark: bm1
564 564 bookmark: bm2
565 565 bookmark: bm3
566 566 user: test
567 567 date: Thu Jan 01 00:00:00 1970 +0000
568 568 summary: 1
569 569
570 570 *** runcommand log
571 571 changeset: 0:eff892de26ec
572 572 bookmark: bm1
573 573 bookmark: bm2
574 574 bookmark: bm3
575 575 tag: tip
576 576 user: test
577 577 date: Thu Jan 01 00:00:00 1970 +0000
578 578 summary: 1
579 579
580 580
581 581 $ cat <<EOF >> .hg/hgrc
582 582 > [extensions]
583 583 > mq =
584 584 > EOF
585 585
586 586 >>> import os
587 587 >>> from hgclient import check, readchannel, runcommand
588 588 >>> @check
589 589 ... def mqoutsidechanges(server):
590 590 ... readchannel(server)
591 591 ...
592 592 ... # load repo.mq
593 593 ... runcommand(server, [b'qapplied'])
594 594 ... os.system('hg qnew 0.diff')
595 595 ... # repo.mq should be invalidated
596 596 ... runcommand(server, [b'qapplied'])
597 597 ...
598 598 ... runcommand(server, [b'qpop', b'--all'])
599 599 ... os.system('hg qqueue --create foo')
600 600 ... # repo.mq should be recreated to point to new queue
601 601 ... runcommand(server, [b'qqueue', b'--active'])
602 602 *** runcommand qapplied
603 603 *** runcommand qapplied
604 604 0.diff
605 605 *** runcommand qpop --all
606 606 popping 0.diff
607 607 patch queue now empty
608 608 *** runcommand qqueue --active
609 609 foo
610 610
611 611 $ cat <<'EOF' > ../dbgui.py
612 612 > import os
613 613 > import sys
614 614 > from mercurial import commands, registrar
615 615 > cmdtable = {}
616 616 > command = registrar.command(cmdtable)
617 617 > @command(b"debuggetpass", norepo=True)
618 618 > def debuggetpass(ui):
619 619 > ui.write(b"%s\n" % ui.getpass())
620 620 > @command(b"debugprompt", norepo=True)
621 621 > def debugprompt(ui):
622 622 > ui.write(b"%s\n" % ui.prompt(b"prompt:"))
623 623 > @command(b"debugpromptchoice", norepo=True)
624 624 > def debugpromptchoice(ui):
625 625 > msg = b"promptchoice (y/n)? $$ &Yes $$ &No"
626 626 > ui.write(b"%d\n" % ui.promptchoice(msg))
627 627 > @command(b"debugreadstdin", norepo=True)
628 628 > def debugreadstdin(ui):
629 629 > ui.write(b"read: %r\n" % sys.stdin.read(1))
630 630 > @command(b"debugwritestdout", norepo=True)
631 631 > def debugwritestdout(ui):
632 632 > os.write(1, b"low-level stdout fd and\n")
633 633 > sys.stdout.write("stdout should be redirected to stderr\n")
634 634 > sys.stdout.flush()
635 635 > EOF
636 636 $ cat <<EOF >> .hg/hgrc
637 637 > [extensions]
638 638 > dbgui = ../dbgui.py
639 639 > EOF
640 640
641 641 >>> from hgclient import check, readchannel, runcommand, stringio
642 642 >>> @check
643 643 ... def getpass(server):
644 644 ... readchannel(server)
645 645 ... runcommand(server, [b'debuggetpass', b'--config',
646 646 ... b'ui.interactive=True'],
647 647 ... input=stringio(b'1234\n'))
648 648 ... runcommand(server, [b'debuggetpass', b'--config',
649 649 ... b'ui.interactive=True'],
650 650 ... input=stringio(b'\n'))
651 651 ... runcommand(server, [b'debuggetpass', b'--config',
652 652 ... b'ui.interactive=True'],
653 653 ... input=stringio(b''))
654 654 ... runcommand(server, [b'debugprompt', b'--config',
655 655 ... b'ui.interactive=True'],
656 656 ... input=stringio(b'5678\n'))
657 ... runcommand(server, [b'debugprompt', b'--config',
658 ... b'ui.interactive=True'],
659 ... input=stringio(b'\nremainder\nshould\nnot\nbe\nread\n'))
657 660 ... runcommand(server, [b'debugreadstdin'])
658 661 ... runcommand(server, [b'debugwritestdout'])
659 662 *** runcommand debuggetpass --config ui.interactive=True
660 663 password: 1234
661 664 *** runcommand debuggetpass --config ui.interactive=True
662 665 password:
663 666 *** runcommand debuggetpass --config ui.interactive=True
664 667 password: abort: response expected
665 668 [255]
666 669 *** runcommand debugprompt --config ui.interactive=True
667 670 prompt: 5678
671 *** runcommand debugprompt --config ui.interactive=True
672 prompt: y
668 673 *** runcommand debugreadstdin
669 674 read: ''
670 675 *** runcommand debugwritestdout
671 676 low-level stdout fd and
672 677 stdout should be redirected to stderr
673 678
674 679
675 680 run commandserver in commandserver, which is silly but should work:
676 681
677 682 >>> from hgclient import bprint, check, readchannel, runcommand, stringio
678 683 >>> @check
679 684 ... def nested(server):
680 685 ... bprint(b'%c, %r' % readchannel(server))
681 686 ... class nestedserver(object):
682 687 ... stdin = stringio(b'getencoding\n')
683 688 ... stdout = stringio()
684 689 ... runcommand(server, [b'serve', b'--cmdserver', b'pipe'],
685 690 ... output=nestedserver.stdout, input=nestedserver.stdin)
686 691 ... nestedserver.stdout.seek(0)
687 692 ... bprint(b'%c, %r' % readchannel(nestedserver)) # hello
688 693 ... bprint(b'%c, %r' % readchannel(nestedserver)) # getencoding
689 694 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
690 695 *** runcommand serve --cmdserver pipe
691 696 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
692 697 r, '*' (glob)
693 698
694 699
695 700 start without repository:
696 701
697 702 $ cd ..
698 703
699 704 >>> from hgclient import bprint, check, readchannel, runcommand
700 705 >>> @check
701 706 ... def hellomessage(server):
702 707 ... ch, data = readchannel(server)
703 708 ... bprint(b'%c, %r' % (ch, data))
704 709 ... # run an arbitrary command to make sure the next thing the server
705 710 ... # sends isn't part of the hello message
706 711 ... runcommand(server, [b'id'])
707 712 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
708 713 *** runcommand id
709 714 abort: there is no Mercurial repository here (.hg not found)
710 715 [255]
711 716
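The hello block itself is just "field: value" lines on the 'o' channel, so a client can turn it into a dict before issuing any command. A minimal sketch (parse_hello is a hypothetical helper, not part of hgclient):

  def parse_hello(payload):
      # payload is the data of the first 'o' frame, e.g.
      # b'capabilities: getencoding runcommand\nencoding: ascii\npid: 1234'
      fields = {}
      for line in payload.splitlines():
          key, value = line.split(b': ', 1)
          fields[key] = value
      return fields

A client would typically check that b'runcommand' appears in parse_hello(data)[b'capabilities'].split() before doing anything else.
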
712 717 >>> from hgclient import check, readchannel, runcommand
713 718 >>> @check
714 719 ... def startwithoutrepo(server):
715 720 ... readchannel(server)
716 721 ... runcommand(server, [b'init', b'repo2'])
717 722 ... runcommand(server, [b'id', b'-R', b'repo2'])
718 723 *** runcommand init repo2
719 724 *** runcommand id -R repo2
720 725 000000000000 tip
721 726
722 727
723 728 don't fall back to cwd if an invalid -R path is specified (issue4805):
724 729
725 730 $ cd repo
726 731 $ hg serve --cmdserver pipe -R ../nonexistent
727 732 abort: repository ../nonexistent not found!
728 733 [255]
729 734 $ cd ..
730 735
731 736
732 737 structured message channel:
733 738
734 739 $ cat <<'EOF' >> repo2/.hg/hgrc
735 740 > [ui]
736 741 > # server --config should take precedence over the repository option
737 742 > message-output = stdio
738 743 > EOF
739 744
740 745 >>> from hgclient import bprint, checkwith, readchannel, runcommand
741 746 >>> @checkwith(extraargs=[b'--config', b'ui.message-output=channel',
742 747 ... b'--config', b'cmdserver.message-encodings=foo cbor'])
743 748 ... def verify(server):
744 749 ... _ch, data = readchannel(server)
745 750 ... bprint(data)
746 751 ... runcommand(server, [b'-R', b'repo2', b'verify'])
747 752 capabilities: getencoding runcommand
748 753 encoding: ascii
749 754 message-encoding: cbor
750 755 pid: * (glob)
751 756 pgid: * (glob) (no-windows !)
752 757 *** runcommand -R repo2 verify
753 758 message: '\xa2DdataTchecking changesets\nDtypeFstatus'
754 759 message: '\xa6Ditem@Cpos\xf6EtopicHcheckingEtotal\xf6DtypeHprogressDunit@'
755 760 message: '\xa2DdataSchecking manifests\nDtypeFstatus'
756 761 message: '\xa6Ditem@Cpos\xf6EtopicHcheckingEtotal\xf6DtypeHprogressDunit@'
757 762 message: '\xa2DdataX0crosschecking files in changesets and manifests\nDtypeFstatus'
758 763 message: '\xa6Ditem@Cpos\xf6EtopicMcrosscheckingEtotal\xf6DtypeHprogressDunit@'
759 764 message: '\xa2DdataOchecking files\nDtypeFstatus'
760 765 message: '\xa6Ditem@Cpos\xf6EtopicHcheckingEtotal\xf6DtypeHprogressDunit@'
761 766 message: '\xa2DdataX/checked 0 changesets with 0 changes to 0 files\nDtypeFstatus'
762 767
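Each "message:" line above is the repr of a single CBOR-encoded map sent over the message channel, with byte-string keys such as b'type' and b'data'. It can be decoded with any CBOR library; this sketch assumes the third-party cbor2 package and simply re-decodes the first status message from the output above:

  import cbor2

  raw = b'\xa2DdataTchecking changesets\nDtypeFstatus'
  print(cbor2.loads(raw))
  # -> {b'data': b'checking changesets\n', b'type': b'status'}

The progress messages decode the same way, into maps with b'topic', b'pos', b'total', b'item' and b'unit' entries.
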
763 768 >>> from hgclient import checkwith, readchannel, runcommand, stringio
764 769 >>> @checkwith(extraargs=[b'--config', b'ui.message-output=channel',
765 770 ... b'--config', b'cmdserver.message-encodings=cbor',
766 771 ... b'--config', b'extensions.dbgui=dbgui.py'])
767 772 ... def prompt(server):
768 773 ... readchannel(server)
769 774 ... interactive = [b'--config', b'ui.interactive=True']
770 775 ... runcommand(server, [b'debuggetpass'] + interactive,
771 776 ... input=stringio(b'1234\n'))
772 777 ... runcommand(server, [b'debugprompt'] + interactive,
773 778 ... input=stringio(b'5678\n'))
774 779 ... runcommand(server, [b'debugpromptchoice'] + interactive,
775 780 ... input=stringio(b'n\n'))
776 781 *** runcommand debuggetpass --config ui.interactive=True
777 782 message: '\xa3DdataJpassword: Hpassword\xf5DtypeFprompt'
778 783 1234
779 784 *** runcommand debugprompt --config ui.interactive=True
780 785 message: '\xa3DdataGprompt:GdefaultAyDtypeFprompt'
781 786 5678
782 787 *** runcommand debugpromptchoice --config ui.interactive=True
783 788 message: '\xa4Gchoices\x82\x82AyCYes\x82AnBNoDdataTpromptchoice (y/n)? GdefaultAyDtypeFprompt'
784 789 1
785 790
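The prompt messages carry enough structure for a client to render the question itself rather than parse display text. Decoding the promptchoice payload shown above (again assuming cbor2) exposes the choice list and the default; the actual answer still has to be sent back as ordinary line input:

  import cbor2

  raw = (b'\xa4Gchoices\x82\x82AyCYes\x82AnBNo'
         b'DdataTpromptchoice (y/n)? GdefaultAyDtypeFprompt')
  print(cbor2.loads(raw))
  # -> {b'choices': [[b'y', b'Yes'], [b'n', b'No']],
  #     b'data': b'promptchoice (y/n)? ',
  #     b'default': b'y',
  #     b'type': b'prompt'}
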
786 791 bad message encoding:
787 792
788 793 $ hg serve --cmdserver pipe --config ui.message-output=channel
789 794 abort: no supported message encodings:
790 795 [255]
791 796 $ hg serve --cmdserver pipe --config ui.message-output=channel \
792 797 > --config cmdserver.message-encodings='foo bar'
793 798 abort: no supported message encodings: foo bar
794 799 [255]
795 800
796 801 unix domain socket:
797 802
798 803 $ cd repo
799 804 $ hg update -q
800 805
801 806 #if unix-socket unix-permissions
802 807
803 808 >>> from hgclient import bprint, check, readchannel, runcommand, stringio, unixserver
804 809 >>> server = unixserver(b'.hg/server.sock', b'.hg/server.log')
805 810 >>> def hellomessage(conn):
806 811 ... ch, data = readchannel(conn)
807 812 ... bprint(b'%c, %r' % (ch, data))
808 813 ... runcommand(conn, [b'id'])
809 814 >>> check(hellomessage, server.connect)
810 815 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
811 816 *** runcommand id
812 817 eff892de26ec tip bm1/bm2/bm3
813 818 >>> def unknowncommand(conn):
814 819 ... readchannel(conn)
815 820 ... conn.stdin.write(b'unknowncommand\n')
816 821 >>> check(unknowncommand, server.connect) # error sent to server.log
817 822 >>> def serverinput(conn):
818 823 ... readchannel(conn)
819 824 ... patch = b"""
820 825 ... # HG changeset patch
821 826 ... # User test
822 827 ... # Date 0 0
823 828 ... 2
824 829 ...
825 830 ... diff -r eff892de26ec -r 1ed24be7e7a0 a
826 831 ... --- a/a
827 832 ... +++ b/a
828 833 ... @@ -1,1 +1,2 @@
829 834 ... 1
830 835 ... +2
831 836 ... """
832 837 ... runcommand(conn, [b'import', b'-'], input=stringio(patch))
833 838 ... runcommand(conn, [b'log', b'-rtip', b'-q'])
834 839 >>> check(serverinput, server.connect)
835 840 *** runcommand import -
836 841 applying patch from stdin
837 842 *** runcommand log -rtip -q
838 843 2:1ed24be7e7a0
839 844 >>> server.shutdown()
840 845
841 846 $ cat .hg/server.log
842 847 listening at .hg/server.sock
843 848 abort: unknown command unknowncommand
844 849 killed!
845 850 $ rm .hg/server.log
846 851
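In unix mode the framing is the same as for the pipe server; only the transport changes. A rough sketch of connecting by hand, assuming the server started above is still listening on .hg/server.sock and reusing the hypothetical readframe() helper sketched earlier:

  import socket

  sock = socket.socket(socket.AF_UNIX)
  sock.connect('.hg/server.sock')
  conn = sock.makefile('rwb')

  channel, hello = readframe(conn)   # 'o', b'capabilities: ...'

From here the conversation is the same as over the pipe: read the hello frame, send commands, and answer any 'I'/'L' input requests with length-prefixed data.
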
847 852 if the server crashes before the hello message, the traceback is sent to
848 853 the 'e' channel as a last resort:
849 854
850 855 $ cat <<'EOF' > ../earlycrasher.py
851 856 > from mercurial import commandserver, extensions
852 857 > def _serverequest(orig, ui, repo, conn, createcmdserver, prereposetups):
853 858 > def createcmdserver(*args, **kwargs):
854 859 > raise Exception('crash')
855 860 > return orig(ui, repo, conn, createcmdserver, prereposetups)
856 861 > def extsetup(ui):
857 862 > extensions.wrapfunction(commandserver, b'_serverequest', _serverequest)
858 863 > EOF
859 864 $ cat <<EOF >> .hg/hgrc
860 865 > [extensions]
861 866 > earlycrasher = ../earlycrasher.py
862 867 > EOF
863 868 >>> from hgclient import bprint, check, readchannel, unixserver
864 869 >>> server = unixserver(b'.hg/server.sock', b'.hg/server.log')
865 870 >>> def earlycrash(conn):
866 871 ... while True:
867 872 ... try:
868 873 ... ch, data = readchannel(conn)
869 874 ... for l in data.splitlines(True):
870 875 ... if not l.startswith(b' '):
871 876 ... bprint(b'%c, %r' % (ch, l))
872 877 ... except EOFError:
873 878 ... break
874 879 >>> check(earlycrash, server.connect)
875 880 e, 'Traceback (most recent call last):\n'
876 881 e, 'Exception: crash\n'
877 882 >>> server.shutdown()
878 883
879 884 $ cat .hg/server.log | grep -v '^ '
880 885 listening at .hg/server.sock
881 886 Traceback (most recent call last):
882 887 Exception: crash
883 888 killed!
884 889 #endif
885 890 #if no-unix-socket
886 891
887 892 $ hg serve --cmdserver unix -a .hg/server.sock
888 893 abort: unsupported platform
889 894 [255]
890 895
891 896 #endif
892 897
893 898 $ cd ..
894 899
895 900 Test that access to an invalid changelog cache is avoided in
896 901 subsequent operations when the repo object is reused after a failed
897 902 transaction (see 0a7610758c42 also)
898 903
899 904 "hg log" after failure of transaction is needed to detect invalid
900 905 cache in repoview: this can't detect by "hg verify" only.
901 906
902 907 Combinations of "finalization" and "emptiness of changelog" (2 x 2 =
903 908 4) are tested, because '00changelog.i' is changed differently in each
904 909 case.
905 910
906 911 $ cat > $TESTTMP/failafterfinalize.py <<EOF
907 912 > # extension to forcibly abort the transaction after finalization
908 913 > from mercurial import commands, error, extensions, lock as lockmod
909 914 > from mercurial import registrar
910 915 > cmdtable = {}
911 916 > command = registrar.command(cmdtable)
912 917 > configtable = {}
913 918 > configitem = registrar.configitem(configtable)
914 919 > configitem(b'failafterfinalize', b'fail',
915 920 > default=None,
916 921 > )
917 922 > def fail(tr):
918 923 > raise error.Abort(b'fail after finalization')
919 924 > def reposetup(ui, repo):
920 925 > class failrepo(repo.__class__):
921 926 > def commitctx(self, ctx, error=False, origctx=None):
922 927 > if self.ui.configbool(b'failafterfinalize', b'fail'):
923 928 > # finalize callbacks run sorted (ASCII) by category name; the
924 929 > # changelog uses "'cl-%i' % id(self)" as its category, so
925 930 > # b'zzzzzzzz' makes 'fail' run after changelog finalization
926 931 > self.currenttransaction().addfinalize(b'zzzzzzzz', fail)
927 932 > return super(failrepo, self).commitctx(ctx, error, origctx)
928 933 > repo.__class__ = failrepo
929 934 > EOF
930 935
931 936 $ hg init repo3
932 937 $ cd repo3
933 938
934 939 $ cat <<EOF >> $HGRCPATH
935 940 > [ui]
936 941 > logtemplate = {rev} {desc|firstline} ({files})\n
937 942 >
938 943 > [extensions]
939 944 > failafterfinalize = $TESTTMP/failafterfinalize.py
940 945 > EOF
941 946
942 947 - test failure with "empty changelog"
943 948
944 949 $ echo foo > foo
945 950 $ hg add foo
946 951
947 952 (failure before finalization)
948 953
949 954 >>> from hgclient import check, readchannel, runcommand
950 955 >>> @check
951 956 ... def abort(server):
952 957 ... readchannel(server)
953 958 ... runcommand(server, [b'commit',
954 959 ... b'--config', b'hooks.pretxncommit=false',
955 960 ... b'-mfoo'])
956 961 ... runcommand(server, [b'log'])
957 962 ... runcommand(server, [b'verify', b'-q'])
958 963 *** runcommand commit --config hooks.pretxncommit=false -mfoo
959 964 transaction abort!
960 965 rollback completed
961 966 abort: pretxncommit hook exited with status 1
962 967 [255]
963 968 *** runcommand log
964 969 *** runcommand verify -q
965 970
966 971 (failure after finalization)
967 972
968 973 >>> from hgclient import check, readchannel, runcommand
969 974 >>> @check
970 975 ... def abort(server):
971 976 ... readchannel(server)
972 977 ... runcommand(server, [b'commit',
973 978 ... b'--config', b'failafterfinalize.fail=true',
974 979 ... b'-mfoo'])
975 980 ... runcommand(server, [b'log'])
976 981 ... runcommand(server, [b'verify', b'-q'])
977 982 *** runcommand commit --config failafterfinalize.fail=true -mfoo
978 983 transaction abort!
979 984 rollback completed
980 985 abort: fail after finalization
981 986 [255]
982 987 *** runcommand log
983 988 *** runcommand verify -q
984 989
985 990 - test failure with "not-empty changelog"
986 991
987 992 $ echo bar > bar
988 993 $ hg add bar
989 994 $ hg commit -mbar bar
990 995
991 996 (failure before finalization)
992 997
993 998 >>> from hgclient import check, readchannel, runcommand
994 999 >>> @check
995 1000 ... def abort(server):
996 1001 ... readchannel(server)
997 1002 ... runcommand(server, [b'commit',
998 1003 ... b'--config', b'hooks.pretxncommit=false',
999 1004 ... b'-mfoo', b'foo'])
1000 1005 ... runcommand(server, [b'log'])
1001 1006 ... runcommand(server, [b'verify', b'-q'])
1002 1007 *** runcommand commit --config hooks.pretxncommit=false -mfoo foo
1003 1008 transaction abort!
1004 1009 rollback completed
1005 1010 abort: pretxncommit hook exited with status 1
1006 1011 [255]
1007 1012 *** runcommand log
1008 1013 0 bar (bar)
1009 1014 *** runcommand verify -q
1010 1015
1011 1016 (failure after finalization)
1012 1017
1013 1018 >>> from hgclient import check, readchannel, runcommand
1014 1019 >>> @check
1015 1020 ... def abort(server):
1016 1021 ... readchannel(server)
1017 1022 ... runcommand(server, [b'commit',
1018 1023 ... b'--config', b'failafterfinalize.fail=true',
1019 1024 ... b'-mfoo', b'foo'])
1020 1025 ... runcommand(server, [b'log'])
1021 1026 ... runcommand(server, [b'verify', b'-q'])
1022 1027 *** runcommand commit --config failafterfinalize.fail=true -mfoo foo
1023 1028 transaction abort!
1024 1029 rollback completed
1025 1030 abort: fail after finalization
1026 1031 [255]
1027 1032 *** runcommand log
1028 1033 0 bar (bar)
1029 1034 *** runcommand verify -q
1030 1035
1031 1036 $ cd ..
1032 1037
1033 1038 Test symlink traversal over cached audited paths:
1034 1039 -------------------------------------------------
1035 1040
1036 1041 #if symlink
1037 1042
1038 1043 set up symlink hell
1039 1044
1040 1045 $ mkdir merge-symlink-out
1041 1046 $ hg init merge-symlink
1042 1047 $ cd merge-symlink
1043 1048 $ touch base
1044 1049 $ hg commit -qAm base
1045 1050 $ ln -s ../merge-symlink-out a
1046 1051 $ hg commit -qAm 'symlink a -> ../merge-symlink-out'
1047 1052 $ hg up -q 0
1048 1053 $ mkdir a
1049 1054 $ touch a/poisoned
1050 1055 $ hg commit -qAm 'file a/poisoned'
1051 1056 $ hg log -G -T '{rev}: {desc}\n'
1052 1057 @ 2: file a/poisoned
1053 1058 |
1054 1059 | o 1: symlink a -> ../merge-symlink-out
1055 1060 |/
1056 1061 o 0: base
1057 1062
1058 1063
1059 1064 try a trivial merge after update: the cache of audited paths should be
1060 1065 discarded, and the merge should fail (issue5628)
1061 1066
1062 1067 $ hg up -q null
1063 1068 >>> from hgclient import check, readchannel, runcommand
1064 1069 >>> @check
1065 1070 ... def merge(server):
1066 1071 ... readchannel(server)
1067 1072 ... # audit a/poisoned as a good path
1068 1073 ... runcommand(server, [b'up', b'-qC', b'2'])
1069 1074 ... runcommand(server, [b'up', b'-qC', b'1'])
1070 1075 ... # here a is a symlink, so a/poisoned is bad
1071 1076 ... runcommand(server, [b'merge', b'2'])
1072 1077 *** runcommand up -qC 2
1073 1078 *** runcommand up -qC 1
1074 1079 *** runcommand merge 2
1075 1080 abort: path 'a/poisoned' traverses symbolic link 'a'
1076 1081 [255]
1077 1082 $ ls ../merge-symlink-out
1078 1083
1079 1084 the cache of repo.auditor should be discarded, so the matcher never
1080 1085 traverses symlinks:
1081 1086
1082 1087 $ hg up -qC 0
1083 1088 $ touch ../merge-symlink-out/poisoned
1084 1089 >>> from hgclient import check, readchannel, runcommand
1085 1090 >>> @check
1086 1091 ... def files(server):
1087 1092 ... readchannel(server)
1088 1093 ... runcommand(server, [b'up', b'-qC', b'2'])
1089 1094 ... # audit a/poisoned as a good path
1090 1095 ... runcommand(server, [b'files', b'a/poisoned'])
1091 1096 ... runcommand(server, [b'up', b'-qC', b'0'])
1092 1097 ... runcommand(server, [b'up', b'-qC', b'1'])
1093 1098 ... # here 'a' is a symlink, so a/poisoned should be warned
1094 1099 ... runcommand(server, [b'files', b'a/poisoned'])
1095 1100 *** runcommand up -qC 2
1096 1101 *** runcommand files a/poisoned
1097 1102 a/poisoned
1098 1103 *** runcommand up -qC 0
1099 1104 *** runcommand up -qC 1
1100 1105 *** runcommand files a/poisoned
1101 1106 abort: path 'a/poisoned' traverses symbolic link 'a'
1102 1107 [255]
1103 1108
1104 1109 $ cd ..
1105 1110
1106 1111 #endif