phabricator: update the hashes in commit messages as they get submitted...
Matt Harbison
r45995:1a5d3e55 default
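For reference, every request recorded in the cassette below stores its POST body url-encoded, with the interesting "params" field carrying a JSON document. A minimal standard-library sketch for decoding one of those bodies (the helper name is ours; the example body is copied from the differential.query call near the end of the recording):

    import json
    import urllib.parse

    def decode_cassette_body(body):
        fields = urllib.parse.parse_qs(body)      # split the form-encoded fields
        return json.loads(fields["params"][0])    # "params" holds JSON text

    example = (
        "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D"
        "%2C+%22ids%22%3A+%5B8945%2C+8946%5D%7D&output=json&__conduit__=1"
    )
    print(decode_cassette_body(example))
    # -> {'__conduit__': {'token': 'cli-hahayouwish'}, 'ids': [8945, 8946]}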
@@ -0,0 +1,1096 b''
1 {
2 "version": 1,
3 "interactions": [
4 {
5 "response": {
6 "body": {
7 "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
8 },
9 "headers": {
10 "referrer-policy": [
11 "no-referrer"
12 ],
13 "x-xss-protection": [
14 "1; mode=block"
15 ],
16 "server": [
17 "Apache/2.4.10 (Debian)"
18 ],
19 "cache-control": [
20 "no-store"
21 ],
22 "date": [
23 "Mon, 24 Aug 2020 22:18:15 GMT"
24 ],
25 "transfer-encoding": [
26 "chunked"
27 ],
28 "expires": [
29 "Sat, 01 Jan 2000 00:00:00 GMT"
30 ],
31 "x-frame-options": [
32 "Deny"
33 ],
34 "content-type": [
35 "application/json"
36 ],
37 "x-content-type-options": [
38 "nosniff"
39 ],
40 "strict-transport-security": [
41 "max-age=0; includeSubdomains; preload"
42 ]
43 },
44 "status": {
45 "message": "OK",
46 "code": 200
47 }
48 },
49 "request": {
50 "headers": {
51 "content-length": [
52 "183"
53 ],
54 "accept": [
55 "application/mercurial-0.1"
56 ],
57 "host": [
58 "phab.mercurial-scm.org"
59 ],
60 "user-agent": [
61 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
62 ],
63 "content-type": [
64 "application/x-www-form-urlencoded"
65 ]
66 },
67 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D&output=json&__conduit__=1",
68 "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
69 "method": "POST"
70 }
71 },
72 {
73 "response": {
74 "body": {
75 "string": "{\"result\":{\"diffid\":22437,\"phid\":\"PHID-DIFF-q7y7rru5hbxnq2mtosrf\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/22437\\/\"},\"error_code\":null,\"error_info\":null}"
76 },
77 "headers": {
78 "referrer-policy": [
79 "no-referrer"
80 ],
81 "x-xss-protection": [
82 "1; mode=block"
83 ],
84 "server": [
85 "Apache/2.4.10 (Debian)"
86 ],
87 "cache-control": [
88 "no-store"
89 ],
90 "date": [
91 "Mon, 24 Aug 2020 22:18:15 GMT"
92 ],
93 "transfer-encoding": [
94 "chunked"
95 ],
96 "expires": [
97 "Sat, 01 Jan 2000 00:00:00 GMT"
98 ],
99 "x-frame-options": [
100 "Deny"
101 ],
102 "content-type": [
103 "application/json"
104 ],
105 "x-content-type-options": [
106 "nosniff"
107 ],
108 "strict-transport-security": [
109 "max-age=0; includeSubdomains; preload"
110 ]
111 },
112 "status": {
113 "message": "OK",
114 "code": 200
115 }
116 },
117 "request": {
118 "headers": {
119 "content-length": [
120 "1162"
121 ],
122 "accept": [
123 "application/mercurial-0.1"
124 ],
125 "host": [
126 "phab.mercurial-scm.org"
127 ],
128 "user-agent": [
129 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
130 ],
131 "content-type": [
132 "application/x-www-form-urlencoded"
133 ]
134 },
135 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-mod3%5Cn%2Bcontent%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json&__conduit__=1",
136 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
137 "method": "POST"
138 }
139 },
140 {
141 "response": {
142 "body": {
143 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
144 },
145 "headers": {
146 "referrer-policy": [
147 "no-referrer"
148 ],
149 "x-xss-protection": [
150 "1; mode=block"
151 ],
152 "server": [
153 "Apache/2.4.10 (Debian)"
154 ],
155 "cache-control": [
156 "no-store"
157 ],
158 "date": [
159 "Mon, 24 Aug 2020 22:18:16 GMT"
160 ],
161 "transfer-encoding": [
162 "chunked"
163 ],
164 "expires": [
165 "Sat, 01 Jan 2000 00:00:00 GMT"
166 ],
167 "connection": [
168 "close"
169 ],
170 "content-type": [
171 "application/json"
172 ],
173 "x-content-type-options": [
174 "nosniff"
175 ],
176 "x-frame-options": [
177 "Deny"
178 ],
179 "strict-transport-security": [
180 "max-age=0; includeSubdomains; preload"
181 ]
182 },
183 "status": {
184 "message": "OK",
185 "code": 200
186 }
187 },
188 "request": {
189 "headers": {
190 "content-length": [
191 "482"
192 ],
193 "accept": [
194 "application/mercurial-0.1"
195 ],
196 "host": [
197 "phab.mercurial-scm.org"
198 ],
199 "user-agent": [
200 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
201 ],
202 "content-type": [
203 "application/x-www-form-urlencoded"
204 ]
205 },
206 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+22437%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json&__conduit__=1",
207 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
208 "method": "POST"
209 }
210 },
211 {
212 "response": {
213 "body": {
214 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
215 },
216 "headers": {
217 "referrer-policy": [
218 "no-referrer"
219 ],
220 "x-xss-protection": [
221 "1; mode=block"
222 ],
223 "server": [
224 "Apache/2.4.10 (Debian)"
225 ],
226 "cache-control": [
227 "no-store"
228 ],
229 "date": [
230 "Mon, 24 Aug 2020 22:18:17 GMT"
231 ],
232 "transfer-encoding": [
233 "chunked"
234 ],
235 "expires": [
236 "Sat, 01 Jan 2000 00:00:00 GMT"
237 ],
238 "x-frame-options": [
239 "Deny"
240 ],
241 "content-type": [
242 "application/json"
243 ],
244 "x-content-type-options": [
245 "nosniff"
246 ],
247 "strict-transport-security": [
248 "max-age=0; includeSubdomains; preload"
249 ]
250 },
251 "status": {
252 "message": "OK",
253 "code": 200
254 }
255 },
256 "request": {
257 "headers": {
258 "content-length": [
259 "594"
260 ],
261 "accept": [
262 "application/mercurial-0.1"
263 ],
264 "host": [
265 "phab.mercurial-scm.org"
266 ],
267 "user-agent": [
268 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
269 ],
270 "content-type": [
271 "application/x-www-form-urlencoded"
272 ]
273 },
274 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+22437%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json&__conduit__=1",
275 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
276 "method": "POST"
277 }
278 },
279 {
280 "response": {
281 "body": {
282 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"base review (generate test for phabsend)\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"base review (generate test for phabsend)\"}]},\"error_code\":null,\"error_info\":null}"
283 },
284 "headers": {
285 "referrer-policy": [
286 "no-referrer"
287 ],
288 "x-xss-protection": [
289 "1; mode=block"
290 ],
291 "server": [
292 "Apache/2.4.10 (Debian)"
293 ],
294 "cache-control": [
295 "no-store"
296 ],
297 "date": [
298 "Mon, 24 Aug 2020 22:18:17 GMT"
299 ],
300 "transfer-encoding": [
301 "chunked"
302 ],
303 "expires": [
304 "Sat, 01 Jan 2000 00:00:00 GMT"
305 ],
306 "x-frame-options": [
307 "Deny"
308 ],
309 "content-type": [
310 "application/json"
311 ],
312 "x-content-type-options": [
313 "nosniff"
314 ],
315 "strict-transport-security": [
316 "max-age=0; includeSubdomains; preload"
317 ]
318 },
319 "status": {
320 "message": "OK",
321 "code": 200
322 }
323 },
324 "request": {
325 "headers": {
326 "content-length": [
327 "189"
328 ],
329 "accept": [
330 "application/mercurial-0.1"
331 ],
332 "host": [
333 "phab.mercurial-scm.org"
334 ],
335 "user-agent": [
336 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
337 ],
338 "content-type": [
339 "application/x-www-form-urlencoded"
340 ]
341 },
342 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22base+review+%28generate+test+for+phabsend%29%22%7D&output=json&__conduit__=1",
343 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
344 "method": "POST"
345 }
346 },
347 {
348 "response": {
349 "body": {
350 "string": "{\"result\":{\"object\":{\"id\":8945,\"phid\":\"PHID-DREV-suqt5s55kjw235uv2vcf\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-76klselssdel6vp\"},{\"phid\":\"PHID-XACT-DREV-atejrjnkqevgpnv\"},{\"phid\":\"PHID-XACT-DREV-wqkucxolugjm4yr\"},{\"phid\":\"PHID-XACT-DREV-pziu2ibzwaljzto\"},{\"phid\":\"PHID-XACT-DREV-k4o6ptid6jztdrx\"}]},\"error_code\":null,\"error_info\":null}"
351 },
352 "headers": {
353 "referrer-policy": [
354 "no-referrer"
355 ],
356 "x-xss-protection": [
357 "1; mode=block"
358 ],
359 "server": [
360 "Apache/2.4.10 (Debian)"
361 ],
362 "cache-control": [
363 "no-store"
364 ],
365 "date": [
366 "Mon, 24 Aug 2020 22:18:18 GMT"
367 ],
368 "transfer-encoding": [
369 "chunked"
370 ],
371 "expires": [
372 "Sat, 01 Jan 2000 00:00:00 GMT"
373 ],
374 "x-frame-options": [
375 "Deny"
376 ],
377 "content-type": [
378 "application/json"
379 ],
380 "x-content-type-options": [
381 "nosniff"
382 ],
383 "strict-transport-security": [
384 "max-age=0; includeSubdomains; preload"
385 ]
386 },
387 "status": {
388 "message": "OK",
389 "code": 200
390 }
391 },
392 "request": {
393 "headers": {
394 "content-length": [
395 "342"
396 ],
397 "accept": [
398 "application/mercurial-0.1"
399 ],
400 "host": [
401 "phab.mercurial-scm.org"
402 ],
403 "user-agent": [
404 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
405 ],
406 "content-type": [
407 "application/x-www-form-urlencoded"
408 ]
409 },
410 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-q7y7rru5hbxnq2mtosrf%22%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22base+review+%28generate+test+for+phabsend%29%22%7D%5D%7D&output=json&__conduit__=1",
411 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
412 "method": "POST"
413 }
414 },
415 {
416 "response": {
417 "body": {
418 "string": "{\"result\":{\"diffid\":22438,\"phid\":\"PHID-DIFF-6lntv23mzadpzyeaizej\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/22438\\/\"},\"error_code\":null,\"error_info\":null}"
419 },
420 "headers": {
421 "referrer-policy": [
422 "no-referrer"
423 ],
424 "x-xss-protection": [
425 "1; mode=block"
426 ],
427 "server": [
428 "Apache/2.4.10 (Debian)"
429 ],
430 "cache-control": [
431 "no-store"
432 ],
433 "date": [
434 "Mon, 24 Aug 2020 22:18:19 GMT"
435 ],
436 "transfer-encoding": [
437 "chunked"
438 ],
439 "expires": [
440 "Sat, 01 Jan 2000 00:00:00 GMT"
441 ],
442 "x-frame-options": [
443 "Deny"
444 ],
445 "content-type": [
446 "application/json"
447 ],
448 "x-content-type-options": [
449 "nosniff"
450 ],
451 "strict-transport-security": [
452 "max-age=0; includeSubdomains; preload"
453 ]
454 },
455 "status": {
456 "message": "OK",
457 "code": 200
458 }
459 },
460 "request": {
461 "headers": {
462 "content-length": [
463 "1170"
464 ],
465 "accept": [
466 "application/mercurial-0.1"
467 ],
468 "host": [
469 "phab.mercurial-scm.org"
470 ],
471 "user-agent": [
472 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
473 ],
474 "content-type": [
475 "application/x-www-form-urlencoded"
476 ]
477 },
478 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-content%5Cn%2Bmore+content%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D&output=json&__conduit__=1",
479 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
480 "method": "POST"
481 }
482 },
483 {
484 "response": {
485 "body": {
486 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
487 },
488 "headers": {
489 "referrer-policy": [
490 "no-referrer"
491 ],
492 "x-xss-protection": [
493 "1; mode=block"
494 ],
495 "server": [
496 "Apache/2.4.10 (Debian)"
497 ],
498 "cache-control": [
499 "no-store"
500 ],
501 "date": [
502 "Mon, 24 Aug 2020 22:18:20 GMT"
503 ],
504 "transfer-encoding": [
505 "chunked"
506 ],
507 "expires": [
508 "Sat, 01 Jan 2000 00:00:00 GMT"
509 ],
510 "x-frame-options": [
511 "Deny"
512 ],
513 "content-type": [
514 "application/json"
515 ],
516 "x-content-type-options": [
517 "nosniff"
518 ],
519 "strict-transport-security": [
520 "max-age=0; includeSubdomains; preload"
521 ]
522 },
523 "status": {
524 "message": "OK",
525 "code": 200
526 }
527 },
528 "request": {
529 "headers": {
530 "content-length": [
531 "482"
532 ],
533 "accept": [
534 "application/mercurial-0.1"
535 ],
536 "host": [
537 "phab.mercurial-scm.org"
538 ],
539 "user-agent": [
540 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
541 ],
542 "content-type": [
543 "application/x-www-form-urlencoded"
544 ]
545 },
546 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22c2874a398f7e0a139283fad3df053430dac536ff%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+22438%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json&__conduit__=1",
547 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
548 "method": "POST"
549 }
550 },
551 {
552 "response": {
553 "body": {
554 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
555 },
556 "headers": {
557 "referrer-policy": [
558 "no-referrer"
559 ],
560 "x-xss-protection": [
561 "1; mode=block"
562 ],
563 "server": [
564 "Apache/2.4.10 (Debian)"
565 ],
566 "cache-control": [
567 "no-store"
568 ],
569 "date": [
570 "Mon, 24 Aug 2020 22:18:20 GMT"
571 ],
572 "transfer-encoding": [
573 "chunked"
574 ],
575 "expires": [
576 "Sat, 01 Jan 2000 00:00:00 GMT"
577 ],
578 "x-frame-options": [
579 "Deny"
580 ],
581 "content-type": [
582 "application/json"
583 ],
584 "x-content-type-options": [
585 "nosniff"
586 ],
587 "strict-transport-security": [
588 "max-age=0; includeSubdomains; preload"
589 ]
590 },
591 "status": {
592 "message": "OK",
593 "code": 200
594 }
595 },
596 "request": {
597 "headers": {
598 "content-length": [
599 "594"
600 ],
601 "accept": [
602 "application/mercurial-0.1"
603 ],
604 "host": [
605 "phab.mercurial-scm.org"
606 ],
607 "user-agent": [
608 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
609 ],
610 "content-type": [
611 "application/x-www-form-urlencoded"
612 ]
613 },
614 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22c2874a398f7e0a139283fad3df053430dac536ff%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22c2874a398f7e0a139283fad3df053430dac536ff%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22133c1c6c64494d545ad3c8bc4c2e42af215760c1%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+22438%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json&__conduit__=1",
615 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
616 "method": "POST"
617 }
618 },
619 {
620 "response": {
621 "body": {
622 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"133c1c6c6449 is my parent (generate test for phabsend)\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"133c1c6c6449 is my parent (generate test for phabsend)\"}]},\"error_code\":null,\"error_info\":null}"
623 },
624 "headers": {
625 "referrer-policy": [
626 "no-referrer"
627 ],
628 "x-xss-protection": [
629 "1; mode=block"
630 ],
631 "server": [
632 "Apache/2.4.10 (Debian)"
633 ],
634 "cache-control": [
635 "no-store"
636 ],
637 "date": [
638 "Mon, 24 Aug 2020 22:18:21 GMT"
639 ],
640 "transfer-encoding": [
641 "chunked"
642 ],
643 "expires": [
644 "Sat, 01 Jan 2000 00:00:00 GMT"
645 ],
646 "x-frame-options": [
647 "Deny"
648 ],
649 "content-type": [
650 "application/json"
651 ],
652 "x-content-type-options": [
653 "nosniff"
654 ],
655 "strict-transport-security": [
656 "max-age=0; includeSubdomains; preload"
657 ]
658 },
659 "status": {
660 "message": "OK",
661 "code": 200
662 }
663 },
664 "request": {
665 "headers": {
666 "content-length": [
667 "203"
668 ],
669 "accept": [
670 "application/mercurial-0.1"
671 ],
672 "host": [
673 "phab.mercurial-scm.org"
674 ],
675 "user-agent": [
676 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
677 ],
678 "content-type": [
679 "application/x-www-form-urlencoded"
680 ]
681 },
682 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22133c1c6c6449+is+my+parent+%28generate+test+for+phabsend%29%22%7D&output=json&__conduit__=1",
683 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
684 "method": "POST"
685 }
686 },
687 {
688 "response": {
689 "body": {
690 "string": "{\"result\":{\"object\":{\"id\":8946,\"phid\":\"PHID-DREV-ja6bdevg5fbykjrpghj4\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-pupj6v3uzdeb6iu\"},{\"phid\":\"PHID-XACT-DREV-czsnsiuaxsecqf4\"},{\"phid\":\"PHID-XACT-DREV-qs6vcl5qj4cqyu2\"},{\"phid\":\"PHID-XACT-DREV-qig4ohigvfnr4h2\"},{\"phid\":\"PHID-XACT-DREV-iv6asp4osxnslvs\"},{\"phid\":\"PHID-XACT-DREV-jn3ojiw6yt3mzuz\"}]},\"error_code\":null,\"error_info\":null}"
691 },
692 "headers": {
693 "referrer-policy": [
694 "no-referrer"
695 ],
696 "x-xss-protection": [
697 "1; mode=block"
698 ],
699 "server": [
700 "Apache/2.4.10 (Debian)"
701 ],
702 "cache-control": [
703 "no-store"
704 ],
705 "date": [
706 "Mon, 24 Aug 2020 22:18:22 GMT"
707 ],
708 "transfer-encoding": [
709 "chunked"
710 ],
711 "expires": [
712 "Sat, 01 Jan 2000 00:00:00 GMT"
713 ],
714 "x-frame-options": [
715 "Deny"
716 ],
717 "content-type": [
718 "application/json"
719 ],
720 "x-content-type-options": [
721 "nosniff"
722 ],
723 "strict-transport-security": [
724 "max-age=0; includeSubdomains; preload"
725 ]
726 },
727 "status": {
728 "message": "OK",
729 "code": 200
730 }
731 },
732 "request": {
733 "headers": {
734 "content-length": [
735 "458"
736 ],
737 "accept": [
738 "application/mercurial-0.1"
739 ],
740 "host": [
741 "phab.mercurial-scm.org"
742 ],
743 "user-agent": [
744 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
745 ],
746 "content-type": [
747 "application/x-www-form-urlencoded"
748 ]
749 },
750 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-6lntv23mzadpzyeaizej%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-suqt5s55kjw235uv2vcf%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22133c1c6c6449+is+my+parent+%28generate+test+for+phabsend%29%22%7D%5D%7D&output=json&__conduit__=1",
751 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
752 "method": "POST"
753 }
754 },
755 {
756 "response": {
757 "body": {
758 "string": "{\"result\":[{\"id\":\"8946\",\"phid\":\"PHID-DREV-ja6bdevg5fbykjrpghj4\",\"title\":\"133c1c6c6449 is my parent (generate test for phabsend)\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8946\",\"dateCreated\":\"1598307502\",\"dateModified\":\"1598307502\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-6lntv23mzadpzyeaizej\",\"diffs\":[\"22438\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-nf7kno6lkl3fjsmo5pyp\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-suqt5s55kjw235uv2vcf\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8945\",\"phid\":\"PHID-DREV-suqt5s55kjw235uv2vcf\",\"title\":\"base review (generate test for phabsend)\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8945\",\"dateCreated\":\"1598307498\",\"dateModified\":\"1598307502\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-q7y7rru5hbxnq2mtosrf\",\"diffs\":[\"22437\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-nf7kno6lkl3fjsmo5pyp\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
759 },
760 "headers": {
761 "referrer-policy": [
762 "no-referrer"
763 ],
764 "x-xss-protection": [
765 "1; mode=block"
766 ],
767 "server": [
768 "Apache/2.4.10 (Debian)"
769 ],
770 "cache-control": [
771 "no-store"
772 ],
773 "date": [
774 "Mon, 24 Aug 2020 22:18:23 GMT"
775 ],
776 "transfer-encoding": [
777 "chunked"
778 ],
779 "expires": [
780 "Sat, 01 Jan 2000 00:00:00 GMT"
781 ],
782 "x-frame-options": [
783 "Deny"
784 ],
785 "content-type": [
786 "application/json"
787 ],
788 "x-content-type-options": [
789 "nosniff"
790 ],
791 "strict-transport-security": [
792 "max-age=0; includeSubdomains; preload"
793 ]
794 },
795 "status": {
796 "message": "OK",
797 "code": 200
798 }
799 },
800 "request": {
801 "headers": {
802 "content-length": [
803 "154"
804 ],
805 "accept": [
806 "application/mercurial-0.1"
807 ],
808 "host": [
809 "phab.mercurial-scm.org"
810 ],
811 "user-agent": [
812 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
813 ],
814 "content-type": [
815 "application/x-www-form-urlencoded"
816 ]
817 },
818 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8945%2C+8946%5D%7D&output=json&__conduit__=1",
819 "uri": "https://phab.mercurial-scm.org//api/differential.query",
820 "method": "POST"
821 }
822 },
823 {
824 "response": {
825 "body": {
826 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
827 },
828 "headers": {
829 "referrer-policy": [
830 "no-referrer"
831 ],
832 "x-xss-protection": [
833 "1; mode=block"
834 ],
835 "server": [
836 "Apache/2.4.10 (Debian)"
837 ],
838 "cache-control": [
839 "no-store"
840 ],
841 "date": [
842 "Mon, 24 Aug 2020 22:18:23 GMT"
843 ],
844 "transfer-encoding": [
845 "chunked"
846 ],
847 "expires": [
848 "Sat, 01 Jan 2000 00:00:00 GMT"
849 ],
850 "x-frame-options": [
851 "Deny"
852 ],
853 "content-type": [
854 "application/json"
855 ],
856 "x-content-type-options": [
857 "nosniff"
858 ],
859 "strict-transport-security": [
860 "max-age=0; includeSubdomains; preload"
861 ]
862 },
863 "status": {
864 "message": "OK",
865 "code": 200
866 }
867 },
868 "request": {
869 "headers": {
870 "content-length": [
871 "482"
872 ],
873 "accept": [
874 "application/mercurial-0.1"
875 ],
876 "host": [
877 "phab.mercurial-scm.org"
878 ],
879 "user-agent": [
880 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
881 ],
882 "content-type": [
883 "application/x-www-form-urlencoded"
884 ]
885 },
886 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22f444f060f4d648731890a4aee1ec5ce372170265%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+22437%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json&__conduit__=1",
887 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
888 "method": "POST"
889 }
890 },
891 {
892 "response": {
893 "body": {
894 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
895 },
896 "headers": {
897 "referrer-policy": [
898 "no-referrer"
899 ],
900 "x-xss-protection": [
901 "1; mode=block"
902 ],
903 "server": [
904 "Apache/2.4.10 (Debian)"
905 ],
906 "cache-control": [
907 "no-store"
908 ],
909 "date": [
910 "Mon, 24 Aug 2020 22:18:24 GMT"
911 ],
912 "transfer-encoding": [
913 "chunked"
914 ],
915 "expires": [
916 "Sat, 01 Jan 2000 00:00:00 GMT"
917 ],
918 "x-frame-options": [
919 "Deny"
920 ],
921 "content-type": [
922 "application/json"
923 ],
924 "x-content-type-options": [
925 "nosniff"
926 ],
927 "strict-transport-security": [
928 "max-age=0; includeSubdomains; preload"
929 ]
930 },
931 "status": {
932 "message": "OK",
933 "code": 200
934 }
935 },
936 "request": {
937 "headers": {
938 "content-length": [
939 "594"
940 ],
941 "accept": [
942 "application/mercurial-0.1"
943 ],
944 "host": [
945 "phab.mercurial-scm.org"
946 ],
947 "user-agent": [
948 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
949 ],
950 "content-type": [
951 "application/x-www-form-urlencoded"
952 ]
953 },
954 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22f444f060f4d648731890a4aee1ec5ce372170265%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22f444f060f4d648731890a4aee1ec5ce372170265%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22e919cdf3d4fe9a926427b1961601eeaf4b4e2caf%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+22437%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json&__conduit__=1",
955 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
956 "method": "POST"
957 }
958 },
959 {
960 "response": {
961 "body": {
962 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
963 },
964 "headers": {
965 "referrer-policy": [
966 "no-referrer"
967 ],
968 "x-xss-protection": [
969 "1; mode=block"
970 ],
971 "server": [
972 "Apache/2.4.10 (Debian)"
973 ],
974 "cache-control": [
975 "no-store"
976 ],
977 "date": [
978 "Mon, 24 Aug 2020 22:18:24 GMT"
979 ],
980 "transfer-encoding": [
981 "chunked"
982 ],
983 "expires": [
984 "Sat, 01 Jan 2000 00:00:00 GMT"
985 ],
986 "x-frame-options": [
987 "Deny"
988 ],
989 "content-type": [
990 "application/json"
991 ],
992 "x-content-type-options": [
993 "nosniff"
994 ],
995 "strict-transport-security": [
996 "max-age=0; includeSubdomains; preload"
997 ]
998 },
999 "status": {
1000 "message": "OK",
1001 "code": 200
1002 }
1003 },
1004 "request": {
1005 "headers": {
1006 "content-length": [
1007 "482"
1008 ],
1009 "accept": [
1010 "application/mercurial-0.1"
1011 ],
1012 "host": [
1013 "phab.mercurial-scm.org"
1014 ],
1015 "user-agent": [
1016 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
1017 ],
1018 "content-type": [
1019 "application/x-www-form-urlencoded"
1020 ]
1021 },
1022 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%229c9290f945b15b9420fffd5f5fc59260c1cbbcf4%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22f444f060f4d648731890a4aee1ec5ce372170265%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+22438%2C+%22name%22%3A+%22hg%3Ameta%22%7D&output=json&__conduit__=1",
1023 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1024 "method": "POST"
1025 }
1026 },
1027 {
1028 "response": {
1029 "body": {
1030 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1031 },
1032 "headers": {
1033 "referrer-policy": [
1034 "no-referrer"
1035 ],
1036 "x-xss-protection": [
1037 "1; mode=block"
1038 ],
1039 "server": [
1040 "Apache/2.4.10 (Debian)"
1041 ],
1042 "cache-control": [
1043 "no-store"
1044 ],
1045 "date": [
1046 "Mon, 24 Aug 2020 22:18:25 GMT"
1047 ],
1048 "transfer-encoding": [
1049 "chunked"
1050 ],
1051 "expires": [
1052 "Sat, 01 Jan 2000 00:00:00 GMT"
1053 ],
1054 "x-frame-options": [
1055 "Deny"
1056 ],
1057 "content-type": [
1058 "application/json"
1059 ],
1060 "x-content-type-options": [
1061 "nosniff"
1062 ],
1063 "strict-transport-security": [
1064 "max-age=0; includeSubdomains; preload"
1065 ]
1066 },
1067 "status": {
1068 "message": "OK",
1069 "code": 200
1070 }
1071 },
1072 "request": {
1073 "headers": {
1074 "content-length": [
1075 "594"
1076 ],
1077 "accept": [
1078 "application/mercurial-0.1"
1079 ],
1080 "host": [
1081 "phab.mercurial-scm.org"
1082 ],
1083 "user-agent": [
1084 "mercurial/proto-1.0 (Mercurial 5.5+173-3de55438d570+20200824)"
1085 ],
1086 "content-type": [
1087 "application/x-www-form-urlencoded"
1088 ]
1089 },
1090 "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%229c9290f945b15b9420fffd5f5fc59260c1cbbcf4%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%229c9290f945b15b9420fffd5f5fc59260c1cbbcf4%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22f444f060f4d648731890a4aee1ec5ce372170265%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+22438%2C+%22name%22%3A+%22local%3Acommits%22%7D&output=json&__conduit__=1",
1091 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1092 "method": "POST"
1093 }
1094 }
1095 ]
1096 } No newline at end of file
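The recording above ends with a second round of differential.setdiffproperty calls: once the two revisions exist and the local commits have been rewritten to carry their Differential Revision URLs (the point of this change), phabsend pushes the new hashes back into the "hg:meta" and "local:commits" diff properties. Below is a minimal sketch of one such call using only the standard library; the host is a placeholder and the token is the sanitised value from the cassette, so this is illustrative rather than something to run against a real install:

    import json
    import urllib.parse
    import urllib.request

    url = "https://phab.example.com/api/differential.setdiffproperty"
    params = {
        "__conduit__": {"token": "cli-hahayouwish"},
        "diff_id": 22437,
        "name": "hg:meta",
        # the property value is itself a JSON string, as in the recording
        "data": json.dumps({
            "branch": "default",
            "date": "0 0",
            "node": "f444f060f4d648731890a4aee1ec5ce372170265",
            "parent": "e919cdf3d4fe9a926427b1961601eeaf4b4e2caf",
            "user": "test",
        }),
    }
    body = urllib.parse.urlencode({
        "params": json.dumps(params),
        "output": "json",
        "__conduit__": 1,
    }).encode("ascii")
    request = urllib.request.Request(url, data=body)
    with urllib.request.urlopen(request) as response:
        print(json.load(response))  # {"result": null, "error_code": null, ...} on success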
@@ -1,2304 +1,2310 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 rewriteutil,
79 scmutil,
80 scmutil,
80 smartset,
81 smartset,
81 tags,
82 tags,
82 templatefilters,
83 templatefilters,
83 templateutil,
84 templateutil,
84 url as urlmod,
85 url as urlmod,
85 util,
86 util,
86 )
87 )
87 from mercurial.utils import (
88 from mercurial.utils import (
88 procutil,
89 procutil,
89 stringutil,
90 stringutil,
90 )
91 )
91 from . import show
92 from . import show
92
93
93
94
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
98 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
99 testedwith = b'ships-with-hg-core'
99
100
100 eh = exthelper.exthelper()
101 eh = exthelper.exthelper()
101
102
102 cmdtable = eh.cmdtable
103 cmdtable = eh.cmdtable
103 command = eh.command
104 command = eh.command
104 configtable = eh.configtable
105 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
106 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
107 uisetup = eh.finaluisetup
107
108
108 # developer config: phabricator.batchsize
109 # developer config: phabricator.batchsize
109 eh.configitem(
110 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
111 b'phabricator', b'batchsize', default=12,
111 )
112 )
112 eh.configitem(
113 eh.configitem(
113 b'phabricator', b'callsign', default=None,
114 b'phabricator', b'callsign', default=None,
114 )
115 )
115 eh.configitem(
116 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
117 b'phabricator', b'curlcmd', default=None,
117 )
118 )
118 # developer config: phabricator.debug
119 # developer config: phabricator.debug
119 eh.configitem(
120 eh.configitem(
120 b'phabricator', b'debug', default=False,
121 b'phabricator', b'debug', default=False,
121 )
122 )
122 # developer config: phabricator.repophid
123 # developer config: phabricator.repophid
123 eh.configitem(
124 eh.configitem(
124 b'phabricator', b'repophid', default=None,
125 b'phabricator', b'repophid', default=None,
125 )
126 )
126 eh.configitem(
127 eh.configitem(
127 b'phabricator', b'url', default=None,
128 b'phabricator', b'url', default=None,
128 )
129 )
129 eh.configitem(
130 eh.configitem(
130 b'phabsend', b'confirm', default=False,
131 b'phabsend', b'confirm', default=False,
131 )
132 )
132 eh.configitem(
133 eh.configitem(
133 b'phabimport', b'secret', default=False,
134 b'phabimport', b'secret', default=False,
134 )
135 )
135 eh.configitem(
136 eh.configitem(
136 b'phabimport', b'obsolete', default=False,
137 b'phabimport', b'obsolete', default=False,
137 )
138 )
138
139
139 colortable = {
140 colortable = {
140 b'phabricator.action.created': b'green',
141 b'phabricator.action.created': b'green',
141 b'phabricator.action.skipped': b'magenta',
142 b'phabricator.action.skipped': b'magenta',
142 b'phabricator.action.updated': b'magenta',
143 b'phabricator.action.updated': b'magenta',
143 b'phabricator.desc': b'',
144 b'phabricator.desc': b'',
144 b'phabricator.drev': b'bold',
145 b'phabricator.drev': b'bold',
145 b'phabricator.node': b'',
146 b'phabricator.node': b'',
146 b'phabricator.status.abandoned': b'magenta dim',
147 b'phabricator.status.abandoned': b'magenta dim',
147 b'phabricator.status.accepted': b'green bold',
148 b'phabricator.status.accepted': b'green bold',
148 b'phabricator.status.closed': b'green',
149 b'phabricator.status.closed': b'green',
149 b'phabricator.status.needsreview': b'yellow',
150 b'phabricator.status.needsreview': b'yellow',
150 b'phabricator.status.needsrevision': b'red',
151 b'phabricator.status.needsrevision': b'red',
151 b'phabricator.status.changesplanned': b'red',
152 b'phabricator.status.changesplanned': b'red',
152 }
153 }
153
154
154 _VCR_FLAGS = [
155 _VCR_FLAGS = [
155 (
156 (
156 b'',
157 b'',
157 b'test-vcr',
158 b'test-vcr',
158 b'',
159 b'',
159 _(
160 _(
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
161 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
161 b', otherwise will mock all http requests using the specified vcr file.'
162 b', otherwise will mock all http requests using the specified vcr file.'
162 b' (ADVANCED)'
163 b' (ADVANCED)'
163 ),
164 ),
164 ),
165 ),
165 ]
166 ]
166
167
167
168
168 @eh.wrapfunction(localrepo, "loadhgrc")
169 @eh.wrapfunction(localrepo, "loadhgrc")
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
170 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
170 """Load ``.arcconfig`` content into a ui instance on repository open.
171 """Load ``.arcconfig`` content into a ui instance on repository open.
171 """
172 """
172 result = False
173 result = False
173 arcconfig = {}
174 arcconfig = {}
174
175
175 try:
176 try:
176 # json.loads only accepts bytes from 3.6+
177 # json.loads only accepts bytes from 3.6+
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
178 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
178 # json.loads only returns unicode strings
179 # json.loads only returns unicode strings
179 arcconfig = pycompat.rapply(
180 arcconfig = pycompat.rapply(
180 lambda x: encoding.unitolocal(x)
181 lambda x: encoding.unitolocal(x)
181 if isinstance(x, pycompat.unicode)
182 if isinstance(x, pycompat.unicode)
182 else x,
183 else x,
183 pycompat.json_loads(rawparams),
184 pycompat.json_loads(rawparams),
184 )
185 )
185
186
186 result = True
187 result = True
187 except ValueError:
188 except ValueError:
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
189 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
189 except IOError:
190 except IOError:
190 pass
191 pass
191
192
192 cfg = util.sortdict()
193 cfg = util.sortdict()
193
194
194 if b"repository.callsign" in arcconfig:
195 if b"repository.callsign" in arcconfig:
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
196 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
196
197
197 if b"phabricator.uri" in arcconfig:
198 if b"phabricator.uri" in arcconfig:
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
199 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
199
200
200 if cfg:
201 if cfg:
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
202 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
202
203
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
204 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
204
205
205
206
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
207 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
207 fullflags = flags + _VCR_FLAGS
208 fullflags = flags + _VCR_FLAGS
208
209
209 def hgmatcher(r1, r2):
210 def hgmatcher(r1, r2):
210 if r1.uri != r2.uri or r1.method != r2.method:
211 if r1.uri != r2.uri or r1.method != r2.method:
211 return False
212 return False
212 r1params = util.urlreq.parseqs(r1.body)
213 r1params = util.urlreq.parseqs(r1.body)
213 r2params = util.urlreq.parseqs(r2.body)
214 r2params = util.urlreq.parseqs(r2.body)
214 for key in r1params:
215 for key in r1params:
215 if key not in r2params:
216 if key not in r2params:
216 return False
217 return False
217 value = r1params[key][0]
218 value = r1params[key][0]
218 # we want to compare json payloads without worrying about ordering
219 # we want to compare json payloads without worrying about ordering
219 if value.startswith(b'{') and value.endswith(b'}'):
220 if value.startswith(b'{') and value.endswith(b'}'):
220 r1json = pycompat.json_loads(value)
221 r1json = pycompat.json_loads(value)
221 r2json = pycompat.json_loads(r2params[key][0])
222 r2json = pycompat.json_loads(r2params[key][0])
222 if r1json != r2json:
223 if r1json != r2json:
223 return False
224 return False
224 elif r2params[key][0] != value:
225 elif r2params[key][0] != value:
225 return False
226 return False
226 return True
227 return True
227
228
228 def sanitiserequest(request):
229 def sanitiserequest(request):
229 request.body = re.sub(
230 request.body = re.sub(
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
231 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
231 )
232 )
232 return request
233 return request
233
234
234 def sanitiseresponse(response):
235 def sanitiseresponse(response):
235 if 'set-cookie' in response['headers']:
236 if 'set-cookie' in response['headers']:
236 del response['headers']['set-cookie']
237 del response['headers']['set-cookie']
237 return response
238 return response
238
239
239 def decorate(fn):
240 def decorate(fn):
240 def inner(*args, **kwargs):
241 def inner(*args, **kwargs):
241 vcr = kwargs.pop('test_vcr')
242 vcr = kwargs.pop('test_vcr')
242 if vcr:
243 if vcr:
243 cassette = pycompat.fsdecode(vcr)
244 cassette = pycompat.fsdecode(vcr)
244 import hgdemandimport
245 import hgdemandimport
245
246
246 with hgdemandimport.deactivated():
247 with hgdemandimport.deactivated():
247 import vcr as vcrmod
248 import vcr as vcrmod
248 import vcr.stubs as stubs
249 import vcr.stubs as stubs
249
250
250 vcr = vcrmod.VCR(
251 vcr = vcrmod.VCR(
251 serializer='json',
252 serializer='json',
252 before_record_request=sanitiserequest,
253 before_record_request=sanitiserequest,
253 before_record_response=sanitiseresponse,
254 before_record_response=sanitiseresponse,
254 custom_patches=[
255 custom_patches=[
255 (
256 (
256 urlmod,
257 urlmod,
257 'httpconnection',
258 'httpconnection',
258 stubs.VCRHTTPConnection,
259 stubs.VCRHTTPConnection,
259 ),
260 ),
260 (
261 (
261 urlmod,
262 urlmod,
262 'httpsconnection',
263 'httpsconnection',
263 stubs.VCRHTTPSConnection,
264 stubs.VCRHTTPSConnection,
264 ),
265 ),
265 ],
266 ],
266 )
267 )
267 vcr.register_matcher('hgmatcher', hgmatcher)
268 vcr.register_matcher('hgmatcher', hgmatcher)
268 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
269 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
269 return fn(*args, **kwargs)
270 return fn(*args, **kwargs)
270 return fn(*args, **kwargs)
271 return fn(*args, **kwargs)
271
272
272 cmd = util.checksignature(inner, depth=2)
273 cmd = util.checksignature(inner, depth=2)
273 cmd.__name__ = fn.__name__
274 cmd.__name__ = fn.__name__
274 cmd.__doc__ = fn.__doc__
275 cmd.__doc__ = fn.__doc__
275
276
276 return command(
277 return command(
277 name,
278 name,
278 fullflags,
279 fullflags,
279 spec,
280 spec,
280 helpcategory=helpcategory,
281 helpcategory=helpcategory,
281 optionalrepo=optionalrepo,
282 optionalrepo=optionalrepo,
282 )(cmd)
283 )(cmd)
283
284
284 return decorate
285 return decorate
285
286
286
287
287 def _debug(ui, *msg, **opts):
288 def _debug(ui, *msg, **opts):
288 """write debug output for Phabricator if ``phabricator.debug`` is set
289 """write debug output for Phabricator if ``phabricator.debug`` is set
289
290
290 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
291 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
291 printed with the --debug argument.
292 printed with the --debug argument.
292 """
293 """
293 if ui.configbool(b"phabricator", b"debug"):
294 if ui.configbool(b"phabricator", b"debug"):
294 flag = ui.debugflag
295 flag = ui.debugflag
295 try:
296 try:
296 ui.debugflag = True
297 ui.debugflag = True
297 ui.write(*msg, **opts)
298 ui.write(*msg, **opts)
298 finally:
299 finally:
299 ui.debugflag = flag
300 ui.debugflag = flag
300
301
301
302
302 def urlencodenested(params):
303 def urlencodenested(params):
303 """like urlencode, but works with nested parameters.
304 """like urlencode, but works with nested parameters.
304
305
305 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
306 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
306 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
307 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
307 urlencode. Note: the encoding is consistent with PHP's http_build_query.
308 urlencode. Note: the encoding is consistent with PHP's http_build_query.
308 """
309 """
309 flatparams = util.sortdict()
310 flatparams = util.sortdict()
310
311
311 def process(prefix, obj):
312 def process(prefix, obj):
312 if isinstance(obj, bool):
313 if isinstance(obj, bool):
313 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
314 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
314 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
315 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
315 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
316 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
316 if items is None:
317 if items is None:
317 flatparams[prefix] = obj
318 flatparams[prefix] = obj
318 else:
319 else:
319 for k, v in items(obj):
320 for k, v in items(obj):
320 if prefix:
321 if prefix:
321 process(b'%s[%s]' % (prefix, k), v)
322 process(b'%s[%s]' % (prefix, k), v)
322 else:
323 else:
323 process(k, v)
324 process(k, v)
324
325
325 process(b'', params)
326 process(b'', params)
326 return util.urlreq.urlencode(flatparams)
327 return util.urlreq.urlencode(flatparams)
327
328
328
329
329 def readurltoken(ui):
330 def readurltoken(ui):
330 """return conduit url, token and make sure they exist
331 """return conduit url, token and make sure they exist
331
332
332 Currently read from [auth] config section. In the future, it might
333 Currently read from [auth] config section. In the future, it might
333 make sense to read from .arcconfig and .arcrc as well.
334 make sense to read from .arcconfig and .arcrc as well.
334 """
335 """
335 url = ui.config(b'phabricator', b'url')
336 url = ui.config(b'phabricator', b'url')
336 if not url:
337 if not url:
337 raise error.Abort(
338 raise error.Abort(
338 _(b'config %s.%s is required') % (b'phabricator', b'url')
339 _(b'config %s.%s is required') % (b'phabricator', b'url')
339 )
340 )
340
341
341 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
342 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
342 token = None
343 token = None
343
344
344 if res:
345 if res:
345 group, auth = res
346 group, auth = res
346
347
347 ui.debug(b"using auth.%s.* for authentication\n" % group)
348 ui.debug(b"using auth.%s.* for authentication\n" % group)
348
349
349 token = auth.get(b'phabtoken')
350 token = auth.get(b'phabtoken')
350
351
351 if not token:
352 if not token:
352 raise error.Abort(
353 raise error.Abort(
353 _(b'Can\'t find conduit token associated to %s') % (url,)
354 _(b'Can\'t find conduit token associated to %s') % (url,)
354 )
355 )
355
356
356 return url, token
357 return url, token
357
358
358
359
359 def callconduit(ui, name, params):
360 def callconduit(ui, name, params):
360 """call Conduit API, params is a dict. return json.loads result, or None"""
361 """call Conduit API, params is a dict. return json.loads result, or None"""
361 host, token = readurltoken(ui)
362 host, token = readurltoken(ui)
362 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
363 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
363 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
364 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
364 params = params.copy()
365 params = params.copy()
365 params[b'__conduit__'] = {
366 params[b'__conduit__'] = {
366 b'token': token,
367 b'token': token,
367 }
368 }
368 rawdata = {
369 rawdata = {
369 b'params': templatefilters.json(params),
370 b'params': templatefilters.json(params),
370 b'output': b'json',
371 b'output': b'json',
371 b'__conduit__': 1,
372 b'__conduit__': 1,
372 }
373 }
373 data = urlencodenested(rawdata)
374 data = urlencodenested(rawdata)
374 curlcmd = ui.config(b'phabricator', b'curlcmd')
375 curlcmd = ui.config(b'phabricator', b'curlcmd')
375 if curlcmd:
376 if curlcmd:
376 sin, sout = procutil.popen2(
377 sin, sout = procutil.popen2(
377 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
378 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
378 )
379 )
379 sin.write(data)
380 sin.write(data)
380 sin.close()
381 sin.close()
381 body = sout.read()
382 body = sout.read()
382 else:
383 else:
383 urlopener = urlmod.opener(ui, authinfo)
384 urlopener = urlmod.opener(ui, authinfo)
384 request = util.urlreq.request(pycompat.strurl(url), data=data)
385 request = util.urlreq.request(pycompat.strurl(url), data=data)
385 with contextlib.closing(urlopener.open(request)) as rsp:
386 with contextlib.closing(urlopener.open(request)) as rsp:
386 body = rsp.read()
387 body = rsp.read()
387 ui.debug(b'Conduit Response: %s\n' % body)
388 ui.debug(b'Conduit Response: %s\n' % body)
388 parsed = pycompat.rapply(
389 parsed = pycompat.rapply(
389 lambda x: encoding.unitolocal(x)
390 lambda x: encoding.unitolocal(x)
390 if isinstance(x, pycompat.unicode)
391 if isinstance(x, pycompat.unicode)
391 else x,
392 else x,
392 # json.loads only accepts bytes from py3.6+
393 # json.loads only accepts bytes from py3.6+
393 pycompat.json_loads(encoding.unifromlocal(body)),
394 pycompat.json_loads(encoding.unifromlocal(body)),
394 )
395 )
395 if parsed.get(b'error_code'):
396 if parsed.get(b'error_code'):
396 msg = _(b'Conduit Error (%s): %s') % (
397 msg = _(b'Conduit Error (%s): %s') % (
397 parsed[b'error_code'],
398 parsed[b'error_code'],
398 parsed[b'error_info'],
399 parsed[b'error_info'],
399 )
400 )
400 raise error.Abort(msg)
401 raise error.Abort(msg)
401 return parsed[b'result']
402 return parsed[b'result']
402
403
403
404
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))

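For reference, a sketch of preparing the stdin blob this debug command expects. The method name and callsign value are illustrative only; the constraint shape follows the ``diffusion.repository.search`` call used by ``getrepophid()`` below.

# Standalone sketch, illustrative values only:
import json

params = {"constraints": {"callsigns": ["HG"]}}
blob = json.dumps(params)
# The blob would then be piped to:
#   hg debugcallconduit diffusion.repository.search
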
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid

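Only one field of the repository search answer is consumed above; a sketch with a placeholder PHID (``example_query`` is not part of the module, and every other field of the real answer is ignored by the code above).

example_query = {b'data': [{b'phid': b'PHID-REPO-xxxxxxxxxxxxxxxxxxxx'}]}
assert example_query[b'data'][0][b'phid'].startswith(b'PHID-REPO-')
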
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)

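A quick, self-contained illustration of what the two patterns above accept; the URL and numbers are placeholders.

import re

_tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_descre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)

assert _tagre.match(b'D1234').group(1) == b'1234'
assert _tagre.match(b'D0') is None  # revision numbers never start with 0
m = _descre.search(
    b'phabricator: example change\n\n'
    b'Differential Revision: https://phab.example.com/D1234\n'
)
assert m.group('id') == b'1234'
assert m.group('url') == b'https://phab.example.com/D1234'
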
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for each node in nodelist with known previously sent versions, or
    associated Differential Revision IDs. ``oldnode`` and ``Differential
    diff`` could be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If no such commit message line is found, examines all precursors and their
    tags. Tags with a format like "D1234" are considered a match, and the node
    with that tag, and the number after "D" (ex. 1234), will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result

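For reference, the shape of the mapping returned above, with made-up hashes and revision numbers (``example_result`` is illustrative only; the diff dict keys shown are the ones the code actually reads).

# {new local node: (old node or None, last Differential diff or None, drev)}
example_result = {
    b'\x11' * 20: (b'\x22' * 20, {b'id': b'40', b'revisionID': b'12'}, 12),
    b'\x33' * 20: (None, None, 34),
}
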
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result


def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()

class DiffChangeType(object):
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8


class DiffFileType(object):
    TEXT = 1
    IMAGE = 2
    BINARY = 3


@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required

@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines


@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )

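A minimal sketch of how the three containers above nest. The file name, mode, and hunk values here are placeholders; in the real flow they are filled in by ``maketext()`` / ``makebinary()`` below.

# Illustrative only: one diff owning a single added file with one hunk.
hunk = phabhunk(
    oldOffset=0,
    oldLength=0,
    newOffset=1,
    newLength=1,
    corpus=b'+hello\n',
    addLines=1,
    delLines=0,
)
change = phabchange(currentPath=b'hello.txt', type=DiffChangeType.ADD)
change.addnewmode(b'100644')  # a regular, non-executable file
change.addhunk(hunk)          # also accumulates addLines/delLines

pdiff = phabdiff(sourceControlBaseRevision=b'0' * 40)
pdiff.addchange(change)       # stored keyed by change.currentPath
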
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )

def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )

def uploadfile(fctx):
    """upload binary files to Phabricator"""
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid

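The allocate/upload handshake above reduces to three outcomes; the helper below is an illustrative mirror of that branching, not part of the module (``filealloc`` is the ``file.allocate`` answer used above).

def _upload_strategy(filealloc, fphid):
    """Mirror of the branching in uploadfile(), illustrative only."""
    if not filealloc[b'upload']:
        return b'server already has the content'
    if not fphid:
        return b'single file.upload call with base64-encoded data'
    return b'chunked upload via file.querychunks / file.uploadchunk'
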
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()


def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE

# Copied from mercurial/patch.py
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}


def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True

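The check above is simply a strict UTF-8 decode; a standalone illustration with hard-coded bytes:

# Valid UTF-8 decodes cleanly ...
assert b'caf\xc3\xa9'.decode('utf-8') == u'caf\xe9'
# ... while latin-1 encoded bytes do not, which is the case notutf8()
# reports and marks as binary.
try:
    b'caf\xe9'.decode('utf-8')
except UnicodeDecodeError:
    pass
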
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)


def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)

def creatediff(basectx, ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff

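Only two fields of the ``differential.creatediff`` answer are read later in this file (``writediffproperties()`` notes that a query returns ``id`` instead of ``diffid``); a sketch with placeholder values (``example_diff`` is illustrative only).

example_diff = {
    b'diffid': 1234,                             # placeholder diff number
    b'phid': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx',  # placeholder PHID
}
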
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

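A sketch of the two property payloads written above; hashes, dates, and identities are placeholders.

# 'hg:meta' describes the whole diff: the tip commit plus the base parent.
hg_meta = {
    b'user': b'Jane Doe <jdoe@example.com>',
    b'date': b'1598307495 0',
    b'branch': b'default',
    b'node': b'f' * 40,
    b'parent': b'e' * 40,
}
# 'local:commits' carries one entry per commit folded into the diff.
local_commits = {
    b'f' * 40: {
        b'author': b'Jane Doe',
        b'authorEmail': b'jdoe@example.com',
        b'time': 1598307495,
        b'commit': b'f' * 40,
        b'parents': [b'e' * 40],
        b'branch': b'default',
    },
}
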
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers can pick up the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff

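The final Conduit call above receives a transaction list along these lines. PHIDs, text, and the D-number are placeholders, and ``objectIdentifier`` only appears when updating an existing revision.

example_params = {
    b'transactions': [
        {b'type': b'update', b'value': b'PHID-DIFF-xxxxxxxxxxxxxxxxxxxx'},
        {b'type': b'parents.set', b'value': [b'PHID-DREV-yyyyyyyyyyyyyyyyyyyy']},
        {b'type': b'title', b'value': b'phabricator: example change'},
        {b'type': b'summary', b'value': b'Longer description of the change.'},
    ],
    b'objectIdentifier': 1234,  # the existing D-number, when revid is not None
}
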
def userphids(ui, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]


def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with ``--fold``.
    """
    actiondesc = ui.label(
        {
            b'created': _(b'created'),
            b'skipped': _(b'skipped'),
            b'updated': _(b'updated'),
        }[action],
        b'phabricator.action.%s' % action,
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))

def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    _debug(
        unfi.ui,
        b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
    )

    try:
        writediffproperties([unfi[newnode] for newnode in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)

1244 @vcrcommand(
1245 @vcrcommand(
1245 b'phabsend',
1246 b'phabsend',
1246 [
1247 [
1247 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1248 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1248 (b'', b'amend', True, _(b'update commit messages')),
1249 (b'', b'amend', True, _(b'update commit messages')),
1249 (b'', b'reviewer', [], _(b'specify reviewers')),
1250 (b'', b'reviewer', [], _(b'specify reviewers')),
1250 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1251 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1251 (
1252 (
1252 b'm',
1253 b'm',
1253 b'comment',
1254 b'comment',
1254 b'',
1255 b'',
1255 _(b'add a comment to Revisions with new/updated Diffs'),
1256 _(b'add a comment to Revisions with new/updated Diffs'),
1256 ),
1257 ),
1257 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1258 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1258 (b'', b'fold', False, _(b'combine the revisions into one review')),
1259 (b'', b'fold', False, _(b'combine the revisions into one review')),
1259 ],
1260 ],
1260 _(b'REV [OPTIONS]'),
1261 _(b'REV [OPTIONS]'),
1261 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1262 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1262 )
1263 )
1263 def phabsend(ui, repo, *revs, **opts):
1264 def phabsend(ui, repo, *revs, **opts):
1264 """upload changesets to Phabricator
1265 """upload changesets to Phabricator
1265
1266
1266 If there are multiple revisions specified, they will be send as a stack
1267 If there are multiple revisions specified, they will be send as a stack
1267 with a linear dependencies relationship using the order specified by the
1268 with a linear dependencies relationship using the order specified by the
1268 revset.
1269 revset.
1269
1270
1270 For the first time uploading changesets, local tags will be created to
1271 For the first time uploading changesets, local tags will be created to
1271 maintain the association. After the first time, phabsend will check
1272 maintain the association. After the first time, phabsend will check
1272 obsstore and tags information so it can figure out whether to update an
1273 obsstore and tags information so it can figure out whether to update an
1273 existing Differential Revision, or create a new one.
1274 existing Differential Revision, or create a new one.
1274
1275
1275 If --amend is set, update commit messages so they have the
1276 If --amend is set, update commit messages so they have the
1276 ``Differential Revision`` URL and remove related tags. This is similar to what
1277 ``Differential Revision`` URL and remove related tags. This is similar to what
1277 arcanist does, and is preferred in author-push workflows. Otherwise,
1278 arcanist does, and is preferred in author-push workflows. Otherwise,
1278 use local tags to record the ``Differential Revision`` association.
1279 use local tags to record the ``Differential Revision`` association.
1279
1280
1280 The --confirm option lets you confirm changesets before sending them. You
1281 The --confirm option lets you confirm changesets before sending them. You
1281 can also add the following to your configuration file to make it the default
1282 can also add the following to your configuration file to make it the default
1282 behaviour::
1283 behaviour::
1283
1284
1284 [phabsend]
1285 [phabsend]
1285 confirm = true
1286 confirm = true
1286
1287
1287 By default, a separate review will be created for each commit that is
1288 By default, a separate review will be created for each commit that is
1288 selected, and the reviews will have the same parent/child relationship in Phabricator.
1289 selected, and the reviews will have the same parent/child relationship in Phabricator.
1289 If ``--fold`` is set, multiple commits are rolled up into a single review
1290 If ``--fold`` is set, multiple commits are rolled up into a single review
1290 as if diffed from the parent of the first revision to the last. The commit
1291 as if diffed from the parent of the first revision to the last. The commit
1291 messages are concatenated in the summary field on Phabricator.
1292 messages are concatenated in the summary field on Phabricator.
1292
1293
1293 phabsend will check obsstore and the above association to decide whether to
1294 phabsend will check obsstore and the above association to decide whether to
1294 update an existing Differential Revision, or create a new one.
1295 update an existing Differential Revision, or create a new one.
1295 """
1296 """
1296 opts = pycompat.byteskwargs(opts)
1297 opts = pycompat.byteskwargs(opts)
1297 revs = list(revs) + opts.get(b'rev', [])
1298 revs = list(revs) + opts.get(b'rev', [])
1298 revs = scmutil.revrange(repo, revs)
1299 revs = scmutil.revrange(repo, revs)
1299 revs.sort() # ascending order to preserve topological parent/child in phab
1300 revs.sort() # ascending order to preserve topological parent/child in phab
1300
1301
1301 if not revs:
1302 if not revs:
1302 raise error.Abort(_(b'phabsend requires at least one changeset'))
1303 raise error.Abort(_(b'phabsend requires at least one changeset'))
1303 if opts.get(b'amend'):
1304 if opts.get(b'amend'):
1304 cmdutil.checkunfinished(repo)
1305 cmdutil.checkunfinished(repo)
1305
1306
1306 ctxs = [repo[rev] for rev in revs]
1307 ctxs = [repo[rev] for rev in revs]
1307
1308
1308 if any(c for c in ctxs if c.obsolete()):
1309 if any(c for c in ctxs if c.obsolete()):
1309 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1310 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1310
1311
1311 # Ensure the local commits are an unbroken range. The semantics of the
1312 # Ensure the local commits are an unbroken range. The semantics of the
1312 # --fold option imply this, and the auto restacking of orphans requires
1313 # --fold option imply this, and the auto restacking of orphans requires
1313 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1314 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1314 # get A' as a parent.
1315 # get A' as a parent.
1315 def _fail_nonlinear_revs(revs, revtype):
1316 def _fail_nonlinear_revs(revs, revtype):
1316 badnodes = [repo[r].node() for r in revs]
1317 badnodes = [repo[r].node() for r in revs]
1317 raise error.Abort(
1318 raise error.Abort(
1318 _(b"cannot phabsend multiple %s revisions: %s")
1319 _(b"cannot phabsend multiple %s revisions: %s")
1319 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1320 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1320 hint=_(b"the revisions must form a linear chain"),
1321 hint=_(b"the revisions must form a linear chain"),
1321 )
1322 )
1322
1323
1323 heads = repo.revs(b'heads(%ld)', revs)
1324 heads = repo.revs(b'heads(%ld)', revs)
1324 if len(heads) > 1:
1325 if len(heads) > 1:
1325 _fail_nonlinear_revs(heads, b"head")
1326 _fail_nonlinear_revs(heads, b"head")
1326
1327
1327 roots = repo.revs(b'roots(%ld)', revs)
1328 roots = repo.revs(b'roots(%ld)', revs)
1328 if len(roots) > 1:
1329 if len(roots) > 1:
1329 _fail_nonlinear_revs(roots, b"root")
1330 _fail_nonlinear_revs(roots, b"root")
1330
1331
1331 fold = opts.get(b'fold')
1332 fold = opts.get(b'fold')
1332 if fold:
1333 if fold:
1333 if len(revs) == 1:
1334 if len(revs) == 1:
1334 # TODO: just switch to --no-fold instead?
1335 # TODO: just switch to --no-fold instead?
1335 raise error.Abort(_(b"cannot fold a single revision"))
1336 raise error.Abort(_(b"cannot fold a single revision"))
1336
1337
1337 # There's no clear way to manage multiple commits with a Dxxx tag, so
1338 # There's no clear way to manage multiple commits with a Dxxx tag, so
1338 # require the amend option. (We could append "_nnn", but then it
1339 # require the amend option. (We could append "_nnn", but then it
1339 # becomes jumbled if earlier commits are added to an update.) Ideally this would
1340 # becomes jumbled if earlier commits are added to an update.) Ideally this would
1340 # lock the repo and ensure that the range is editable, but that would
1341 # lock the repo and ensure that the range is editable, but that would
1341 # make the code pretty convoluted. The default behavior of `arc` is to
1342 # make the code pretty convoluted. The default behavior of `arc` is to
1342 # create a new review anyway.
1343 # create a new review anyway.
1343 if not opts.get(b"amend"):
1344 if not opts.get(b"amend"):
1344 raise error.Abort(_(b"cannot fold with --no-amend"))
1345 raise error.Abort(_(b"cannot fold with --no-amend"))
1345
1346
1346 # It might be possible to bucketize the revisions by the DREV value, and
1347 # It might be possible to bucketize the revisions by the DREV value, and
1347 # iterate over those groups when posting, and then again when amending.
1348 # iterate over those groups when posting, and then again when amending.
1348 # But for simplicity, require all selected revisions to be for the same
1349 # But for simplicity, require all selected revisions to be for the same
1349 # DREV (if present). Adding local revisions to an existing DREV is
1350 # DREV (if present). Adding local revisions to an existing DREV is
1350 # acceptable.
1351 # acceptable.
1351 drevmatchers = [
1352 drevmatchers = [
1352 _differentialrevisiondescre.search(ctx.description())
1353 _differentialrevisiondescre.search(ctx.description())
1353 for ctx in ctxs
1354 for ctx in ctxs
1354 ]
1355 ]
1355 if len({m.group('url') for m in drevmatchers if m}) > 1:
1356 if len({m.group('url') for m in drevmatchers if m}) > 1:
1356 raise error.Abort(
1357 raise error.Abort(
1357 _(b"cannot fold revisions with different DREV values")
1358 _(b"cannot fold revisions with different DREV values")
1358 )
1359 )
1359
1360
1360 # {newnode: (oldnode, olddiff, olddrev)}
1361 # {newnode: (oldnode, olddiff, olddrev)}
1361 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1362 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1362
1363
1363 confirm = ui.configbool(b'phabsend', b'confirm')
1364 confirm = ui.configbool(b'phabsend', b'confirm')
1364 confirm |= bool(opts.get(b'confirm'))
1365 confirm |= bool(opts.get(b'confirm'))
1365 if confirm:
1366 if confirm:
1366 confirmed = _confirmbeforesend(repo, revs, oldmap)
1367 confirmed = _confirmbeforesend(repo, revs, oldmap)
1367 if not confirmed:
1368 if not confirmed:
1368 raise error.Abort(_(b'phabsend cancelled'))
1369 raise error.Abort(_(b'phabsend cancelled'))
1369
1370
1370 actions = []
1371 actions = []
1371 reviewers = opts.get(b'reviewer', [])
1372 reviewers = opts.get(b'reviewer', [])
1372 blockers = opts.get(b'blocker', [])
1373 blockers = opts.get(b'blocker', [])
1373 phids = []
1374 phids = []
1374 if reviewers:
1375 if reviewers:
1375 phids.extend(userphids(repo.ui, reviewers))
1376 phids.extend(userphids(repo.ui, reviewers))
1376 if blockers:
1377 if blockers:
1377 phids.extend(
1378 phids.extend(
1378 map(
1379 map(
1379 lambda phid: b'blocking(%s)' % phid,
1380 lambda phid: b'blocking(%s)' % phid,
1380 userphids(repo.ui, blockers),
1381 userphids(repo.ui, blockers),
1381 )
1382 )
1382 )
1383 )
1383 if phids:
1384 if phids:
1384 actions.append({b'type': b'reviewers.add', b'value': phids})
1385 actions.append({b'type': b'reviewers.add', b'value': phids})
1385
1386
1386 drevids = [] # [int]
1387 drevids = [] # [int]
1387 diffmap = {} # {newnode: diff}
1388 diffmap = {} # {newnode: diff}
1388
1389
1389 # Send patches one by one so we know their Differential Revision PHIDs and
1390 # Send patches one by one so we know their Differential Revision PHIDs and
1390 # can provide dependency relationship
1391 # can provide dependency relationship
1391 lastrevphid = None
1392 lastrevphid = None
1392 for ctx in ctxs:
1393 for ctx in ctxs:
1393 if fold:
1394 if fold:
1394 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1395 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1395 else:
1396 else:
1396 ui.debug(b'sending rev %d\n' % ctx.rev())
1397 ui.debug(b'sending rev %d\n' % ctx.rev())
1397
1398
1398 # Get Differential Revision ID
1399 # Get Differential Revision ID
1399 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1400 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1400 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1401 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1401
1402
1402 if fold:
1403 if fold:
1403 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1404 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1404 ctxs[-1].node(), (None, None, None)
1405 ctxs[-1].node(), (None, None, None)
1405 )
1406 )
1406
1407
1407 if oldnode != ctx.node() or opts.get(b'amend'):
1408 if oldnode != ctx.node() or opts.get(b'amend'):
1408 # Create or update Differential Revision
1409 # Create or update Differential Revision
1409 revision, diff = createdifferentialrevision(
1410 revision, diff = createdifferentialrevision(
1410 ctxs if fold else [ctx],
1411 ctxs if fold else [ctx],
1411 revid,
1412 revid,
1412 lastrevphid,
1413 lastrevphid,
1413 oldbasenode,
1414 oldbasenode,
1414 oldnode,
1415 oldnode,
1415 olddiff,
1416 olddiff,
1416 actions,
1417 actions,
1417 opts.get(b'comment'),
1418 opts.get(b'comment'),
1418 )
1419 )
1419
1420
1420 if fold:
1421 if fold:
1421 for ctx in ctxs:
1422 for ctx in ctxs:
1422 diffmap[ctx.node()] = diff
1423 diffmap[ctx.node()] = diff
1423 else:
1424 else:
1424 diffmap[ctx.node()] = diff
1425 diffmap[ctx.node()] = diff
1425
1426
1426 newrevid = int(revision[b'object'][b'id'])
1427 newrevid = int(revision[b'object'][b'id'])
1427 newrevphid = revision[b'object'][b'phid']
1428 newrevphid = revision[b'object'][b'phid']
1428 if revid:
1429 if revid:
1429 action = b'updated'
1430 action = b'updated'
1430 else:
1431 else:
1431 action = b'created'
1432 action = b'created'
1432
1433
1433 # Create a local tag to note the association, if commit message
1434 # Create a local tag to note the association, if commit message
1434 # does not have it already
1435 # does not have it already
1435 if not fold:
1436 if not fold:
1436 m = _differentialrevisiondescre.search(ctx.description())
1437 m = _differentialrevisiondescre.search(ctx.description())
1437 if not m or int(m.group('id')) != newrevid:
1438 if not m or int(m.group('id')) != newrevid:
1438 tagname = b'D%d' % newrevid
1439 tagname = b'D%d' % newrevid
1439 tags.tag(
1440 tags.tag(
1440 repo,
1441 repo,
1441 tagname,
1442 tagname,
1442 ctx.node(),
1443 ctx.node(),
1443 message=None,
1444 message=None,
1444 user=None,
1445 user=None,
1445 date=None,
1446 date=None,
1446 local=True,
1447 local=True,
1447 )
1448 )
1448 else:
1449 else:
1449 # Nothing changed. But still set "newrevphid" so the next revision
1450 # Nothing changed. But still set "newrevphid" so the next revision
1450 # could depend on this one and "newrevid" for the summary line.
1451 # could depend on this one and "newrevid" for the summary line.
1451 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1452 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1452 newrevid = revid
1453 newrevid = revid
1453 action = b'skipped'
1454 action = b'skipped'
1454
1455
1455 drevids.append(newrevid)
1456 drevids.append(newrevid)
1456 lastrevphid = newrevphid
1457 lastrevphid = newrevphid
1457
1458
1458 if fold:
1459 if fold:
1459 for c in ctxs:
1460 for c in ctxs:
1460 if oldmap.get(c.node(), (None, None, None))[2]:
1461 if oldmap.get(c.node(), (None, None, None))[2]:
1461 action = b'updated'
1462 action = b'updated'
1462 else:
1463 else:
1463 action = b'created'
1464 action = b'created'
1464 _print_phabsend_action(ui, c, newrevid, action)
1465 _print_phabsend_action(ui, c, newrevid, action)
1465 break
1466 break
1466
1467
1467 _print_phabsend_action(ui, ctx, newrevid, action)
1468 _print_phabsend_action(ui, ctx, newrevid, action)
1468
1469
1469 # Update commit messages and remove tags
1470 # Update commit messages and remove tags
1470 if opts.get(b'amend'):
1471 if opts.get(b'amend'):
1471 unfi = repo.unfiltered()
1472 unfi = repo.unfiltered()
1472 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1473 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1473 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1474 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1474 # Eagerly evaluate commits to restabilize before creating new
1475 # Eagerly evaluate commits to restabilize before creating new
1475 # commits. The selected revisions are excluded because they are
1476 # commits. The selected revisions are excluded because they are
1476 # automatically restacked as part of the submission process.
1477 # automatically restacked as part of the submission process.
1477 restack = [
1478 restack = [
1478 c
1479 c
1479 for c in repo.set(
1480 for c in repo.set(
1480 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1481 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1481 revs,
1482 revs,
1482 revs,
1483 revs,
1483 )
1484 )
1484 ]
1485 ]
1485 wnode = unfi[b'.'].node()
1486 wnode = unfi[b'.'].node()
1486 mapping = {} # {oldnode: [newnode]}
1487 mapping = {} # {oldnode: [newnode]}
1487 newnodes = []
1488 newnodes = []
1488
1489
1489 drevid = drevids[0]
1490 drevid = drevids[0]
1490
1491
1491 for i, rev in enumerate(revs):
1492 for i, rev in enumerate(revs):
1492 old = unfi[rev]
1493 old = unfi[rev]
1493 if not fold:
1494 if not fold:
1494 drevid = drevids[i]
1495 drevid = drevids[i]
1495 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1496 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1496
1497
1497 newdesc = get_amended_desc(drev, old, fold)
1498 newdesc = get_amended_desc(drev, old, fold)
1498 # Make sure the commit message contains "Differential Revision"
1499 # Make sure the commit message contains "Differential Revision"
1499 if (
1500 if (
1500 old.description() != newdesc
1501 old.description() != newdesc
1501 or old.p1().node() in mapping
1502 or old.p1().node() in mapping
1502 or old.p2().node() in mapping
1503 or old.p2().node() in mapping
1503 ):
1504 ):
1504 if old.phase() == phases.public:
1505 if old.phase() == phases.public:
1505 ui.warn(
1506 ui.warn(
1506 _(b"warning: not updating public commit %s\n")
1507 _(b"warning: not updating public commit %s\n")
1507 % scmutil.formatchangeid(old)
1508 % scmutil.formatchangeid(old)
1508 )
1509 )
1509 continue
1510 continue
1510 parents = [
1511 parents = [
1511 mapping.get(old.p1().node(), (old.p1(),))[0],
1512 mapping.get(old.p1().node(), (old.p1(),))[0],
1512 mapping.get(old.p2().node(), (old.p2(),))[0],
1513 mapping.get(old.p2().node(), (old.p2(),))[0],
1513 ]
1514 ]
1515 newdesc = rewriteutil.update_hash_refs(
1516 repo, newdesc, mapping,
1517 )
1514 new = context.metadataonlyctx(
1518 new = context.metadataonlyctx(
1515 repo,
1519 repo,
1516 old,
1520 old,
1517 parents=parents,
1521 parents=parents,
1518 text=newdesc,
1522 text=newdesc,
1519 user=old.user(),
1523 user=old.user(),
1520 date=old.date(),
1524 date=old.date(),
1521 extra=old.extra(),
1525 extra=old.extra(),
1522 )
1526 )
1523
1527
1524 newnode = new.commit()
1528 newnode = new.commit()
1525
1529
1526 mapping[old.node()] = [newnode]
1530 mapping[old.node()] = [newnode]
1527
1531
1528 if fold:
1532 if fold:
1529 # Defer updating the (single) Diff until all nodes are
1533 # Defer updating the (single) Diff until all nodes are
1530 # collected. No tags were created, so none need to be
1534 # collected. No tags were created, so none need to be
1531 # removed.
1535 # removed.
1532 newnodes.append(newnode)
1536 newnodes.append(newnode)
1533 continue
1537 continue
1534
1538
1535 _amend_diff_properties(
1539 _amend_diff_properties(
1536 unfi, drevid, [newnode], diffmap[old.node()]
1540 unfi, drevid, [newnode], diffmap[old.node()]
1537 )
1541 )
1538
1542
1539 # Remove the local tag since it's no longer necessary
1543 # Remove the local tag since it's no longer necessary
1540 tagname = b'D%d' % drevid
1544 tagname = b'D%d' % drevid
1541 if tagname in repo.tags():
1545 if tagname in repo.tags():
1542 tags.tag(
1546 tags.tag(
1543 repo,
1547 repo,
1544 tagname,
1548 tagname,
1545 nullid,
1549 nullid,
1546 message=None,
1550 message=None,
1547 user=None,
1551 user=None,
1548 date=None,
1552 date=None,
1549 local=True,
1553 local=True,
1550 )
1554 )
1551 elif fold:
1555 elif fold:
1552 # When folding multiple commits into one review with
1556 # When folding multiple commits into one review with
1553 # --fold, track even the commits that weren't amended, so
1557 # --fold, track even the commits that weren't amended, so
1554 # that their association isn't lost if the properties are
1558 # that their association isn't lost if the properties are
1555 # rewritten below.
1559 # rewritten below.
1556 newnodes.append(old.node())
1560 newnodes.append(old.node())
1557
1561
1558 # If the submitted commits are public, no amend takes place, so
1562 # If the submitted commits are public, no amend takes place, so
1559 # there are no newnodes and therefore no diff update to do.
1563 # there are no newnodes and therefore no diff update to do.
1560 if fold and newnodes:
1564 if fold and newnodes:
1561 diff = diffmap[old.node()]
1565 diff = diffmap[old.node()]
1562
1566
1563 # The diff object in diffmap doesn't have the local commits
1567 # The diff object in diffmap doesn't have the local commits
1564 # because it may have been returned by differential.creatediff,
1568 # because it may have been returned by differential.creatediff,
1565 # rather than differential.querydiffs. So use the queried diff (if
1569 # rather than differential.querydiffs. So use the queried diff (if
1566 # present), or force the amend (a new revision is being posted).
1570 # present), or force the amend (a new revision is being posted).
1567 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1571 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1568 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1572 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1569 _amend_diff_properties(unfi, drevid, newnodes, diff)
1573 _amend_diff_properties(unfi, drevid, newnodes, diff)
1570 else:
1574 else:
1571 _debug(
1575 _debug(
1572 ui,
1576 ui,
1573 b"local commit list for D%d is already up-to-date\n"
1577 b"local commit list for D%d is already up-to-date\n"
1574 % drevid,
1578 % drevid,
1575 )
1579 )
1576 elif fold:
1580 elif fold:
1577 _debug(ui, b"no newnodes to update\n")
1581 _debug(ui, b"no newnodes to update\n")
1578
1582
1579 # Restack any children of first-time submissions that were orphaned
1583 # Restack any children of first-time submissions that were orphaned
1580 # in the process. The ctx won't report that it is an orphan until
1584 # in the process. The ctx won't report that it is an orphan until
1581 # the cleanup takes place below.
1585 # the cleanup takes place below.
1582 for old in restack:
1586 for old in restack:
1583 parents = [
1587 parents = [
1584 mapping.get(old.p1().node(), (old.p1(),))[0],
1588 mapping.get(old.p1().node(), (old.p1(),))[0],
1585 mapping.get(old.p2().node(), (old.p2(),))[0],
1589 mapping.get(old.p2().node(), (old.p2(),))[0],
1586 ]
1590 ]
1587 new = context.metadataonlyctx(
1591 new = context.metadataonlyctx(
1588 repo,
1592 repo,
1589 old,
1593 old,
1590 parents=parents,
1594 parents=parents,
1591 text=old.description(),
1595 text=rewriteutil.update_hash_refs(
1596 repo, old.description(), mapping
1597 ),
1592 user=old.user(),
1598 user=old.user(),
1593 date=old.date(),
1599 date=old.date(),
1594 extra=old.extra(),
1600 extra=old.extra(),
1595 )
1601 )
1596
1602
1597 newnode = new.commit()
1603 newnode = new.commit()
1598
1604
1599 # Don't obsolete unselected descendants of nodes that have not
1605 # Don't obsolete unselected descendants of nodes that have not
1600 # been changed in this transaction; that results in an error.
1606 # been changed in this transaction; that results in an error.
1601 if newnode != old.node():
1607 if newnode != old.node():
1602 mapping[old.node()] = [newnode]
1608 mapping[old.node()] = [newnode]
1603 _debug(
1609 _debug(
1604 ui,
1610 ui,
1605 b"restabilizing %s as %s\n"
1611 b"restabilizing %s as %s\n"
1606 % (short(old.node()), short(newnode)),
1612 % (short(old.node()), short(newnode)),
1607 )
1613 )
1608 else:
1614 else:
1609 _debug(
1615 _debug(
1610 ui,
1616 ui,
1611 b"not restabilizing unchanged %s\n" % short(old.node()),
1617 b"not restabilizing unchanged %s\n" % short(old.node()),
1612 )
1618 )
1613
1619
1614 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1620 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1615 if wnode in mapping:
1621 if wnode in mapping:
1616 unfi.setparents(mapping[wnode][0])
1622 unfi.setparents(mapping[wnode][0])
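
The two new rewriteutil.update_hash_refs() calls above are the point of this change: when commits are amended or restacked, any predecessor hashes mentioned in a commit message are rewritten to the successor hashes, driven by the same {oldnode: [newnode]} mapping that the amend loop builds. A rough sketch of that kind of rewrite is shown below; it is an illustration only, not the real rewriteutil implementation, and the helper name _rewrite_hashes is made up here.

    from mercurial.node import short

    def _rewrite_hashes(desc, mapping):
        # mapping has the shape {oldnode: [newnode]} built by phabsend above;
        # replace any short predecessor hash mentioned in the description
        # with the short hash of its (single) successor.
        for oldnode, newnodes in mapping.items():
            if len(newnodes) == 1:
                desc = desc.replace(short(oldnode), short(newnodes[0]))
        return desc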
1617
1623
1618
1624
1619 # Map from "hg:meta" keys to header understood by "hg import". The order is
1625 # Map from "hg:meta" keys to header understood by "hg import". The order is
1620 # consistent with "hg export" output.
1626 # consistent with "hg export" output.
1621 _metanamemap = util.sortdict(
1627 _metanamemap = util.sortdict(
1622 [
1628 [
1623 (b'user', b'User'),
1629 (b'user', b'User'),
1624 (b'date', b'Date'),
1630 (b'date', b'Date'),
1625 (b'branch', b'Branch'),
1631 (b'branch', b'Branch'),
1626 (b'node', b'Node ID'),
1632 (b'node', b'Node ID'),
1627 (b'parent', b'Parent '),
1633 (b'parent', b'Parent '),
1628 ]
1634 ]
1629 )
1635 )
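
The map above pairs "hg:meta" keys with the patch header names that ``hg import`` understands; readpatch() further down walks it in order when emitting the header block. A small sketch of that rendering, using the hg:meta example from getdiffmeta()'s docstring:

    meta = {
        b'user': b'Foo Bar <foo@example.com>',
        b'date': b'1499571514 25200',
        b'branch': b'default',
        b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
        b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
    }
    header = b'# HG changeset patch\n'
    for k in _metanamemap.keys():
        if k in meta:
            header += b'# %s %s\n' % (_metanamemap[k], meta[k])
    # header now reads:
    #   # HG changeset patch
    #   # User Foo Bar <foo@example.com>
    #   # Date 1499571514 25200
    #   # Branch default
    #   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
    #   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16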
1630
1636
1631
1637
1632 def _confirmbeforesend(repo, revs, oldmap):
1638 def _confirmbeforesend(repo, revs, oldmap):
1633 url, token = readurltoken(repo.ui)
1639 url, token = readurltoken(repo.ui)
1634 ui = repo.ui
1640 ui = repo.ui
1635 for rev in revs:
1641 for rev in revs:
1636 ctx = repo[rev]
1642 ctx = repo[rev]
1637 desc = ctx.description().splitlines()[0]
1643 desc = ctx.description().splitlines()[0]
1638 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1644 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1639 if drevid:
1645 if drevid:
1640 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1646 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1641 else:
1647 else:
1642 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1648 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1643
1649
1644 ui.write(
1650 ui.write(
1645 _(b'%s - %s: %s\n')
1651 _(b'%s - %s: %s\n')
1646 % (
1652 % (
1647 drevdesc,
1653 drevdesc,
1648 ui.label(bytes(ctx), b'phabricator.node'),
1654 ui.label(bytes(ctx), b'phabricator.node'),
1649 ui.label(desc, b'phabricator.desc'),
1655 ui.label(desc, b'phabricator.desc'),
1650 )
1656 )
1651 )
1657 )
1652
1658
1653 if ui.promptchoice(
1659 if ui.promptchoice(
1654 _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
1660 _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
1655 ):
1661 ):
1656 return False
1662 return False
1657
1663
1658 return True
1664 return True
1659
1665
1660
1666
1661 _knownstatusnames = {
1667 _knownstatusnames = {
1662 b'accepted',
1668 b'accepted',
1663 b'needsreview',
1669 b'needsreview',
1664 b'needsrevision',
1670 b'needsrevision',
1665 b'closed',
1671 b'closed',
1666 b'abandoned',
1672 b'abandoned',
1667 b'changesplanned',
1673 b'changesplanned',
1668 }
1674 }
1669
1675
1670
1676
1671 def _getstatusname(drev):
1677 def _getstatusname(drev):
1672 """get normalized status name from a Differential Revision"""
1678 """get normalized status name from a Differential Revision"""
1673 return drev[b'statusName'].replace(b' ', b'').lower()
1679 return drev[b'statusName'].replace(b' ', b'').lower()
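
A quick check of the normalization above, using two of the names from the _knownstatusnames set:

    assert _getstatusname({b'statusName': b'Needs Review'}) == b'needsreview'
    assert _getstatusname({b'statusName': b'Changes Planned'}) == b'changesplanned'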
1674
1680
1675
1681
1676 # Small language to specify differential revisions. Support symbols: (), :X,
1682 # Small language to specify differential revisions. Support symbols: (), :X,
1677 # +, and -.
1683 # +, and -.
1678
1684
1679 _elements = {
1685 _elements = {
1680 # token-type: binding-strength, primary, prefix, infix, suffix
1686 # token-type: binding-strength, primary, prefix, infix, suffix
1681 b'(': (12, None, (b'group', 1, b')'), None, None),
1687 b'(': (12, None, (b'group', 1, b')'), None, None),
1682 b':': (8, None, (b'ancestors', 8), None, None),
1688 b':': (8, None, (b'ancestors', 8), None, None),
1683 b'&': (5, None, None, (b'and_', 5), None),
1689 b'&': (5, None, None, (b'and_', 5), None),
1684 b'+': (4, None, None, (b'add', 4), None),
1690 b'+': (4, None, None, (b'add', 4), None),
1685 b'-': (4, None, None, (b'sub', 4), None),
1691 b'-': (4, None, None, (b'sub', 4), None),
1686 b')': (0, None, None, None, None),
1692 b')': (0, None, None, None, None),
1687 b'symbol': (0, b'symbol', None, None, None),
1693 b'symbol': (0, b'symbol', None, None, None),
1688 b'end': (0, None, None, None, None),
1694 b'end': (0, None, None, None, None),
1689 }
1695 }
1690
1696
1691
1697
1692 def _tokenize(text):
1698 def _tokenize(text):
1693 view = memoryview(text) # zero-copy slice
1699 view = memoryview(text) # zero-copy slice
1694 special = b'():+-& '
1700 special = b'():+-& '
1695 pos = 0
1701 pos = 0
1696 length = len(text)
1702 length = len(text)
1697 while pos < length:
1703 while pos < length:
1698 symbol = b''.join(
1704 symbol = b''.join(
1699 itertools.takewhile(
1705 itertools.takewhile(
1700 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1706 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1701 )
1707 )
1702 )
1708 )
1703 if symbol:
1709 if symbol:
1704 yield (b'symbol', symbol, pos)
1710 yield (b'symbol', symbol, pos)
1705 pos += len(symbol)
1711 pos += len(symbol)
1706 else: # special char, ignore space
1712 else: # special char, ignore space
1707 if text[pos : pos + 1] != b' ':
1713 if text[pos : pos + 1] != b' ':
1708 yield (text[pos : pos + 1], None, pos)
1714 yield (text[pos : pos + 1], None, pos)
1709 pos += 1
1715 pos += 1
1710 yield (b'end', None, pos)
1716 yield (b'end', None, pos)
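
A worked example of the tokenizer, on a fragment of the spec language described in the phabread docstring; each token is a (token-type, value, position) tuple:

    assert list(_tokenize(b':D6+8')) == [
        (b':', None, 0),
        (b'symbol', b'D6', 1),
        (b'+', None, 3),
        (b'symbol', b'8', 4),
        (b'end', None, 5),
    ]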
1711
1717
1712
1718
1713 def _parse(text):
1719 def _parse(text):
1714 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1720 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1715 if pos != len(text):
1721 if pos != len(text):
1716 raise error.ParseError(b'invalid token', pos)
1722 raise error.ParseError(b'invalid token', pos)
1717 return tree
1723 return tree
1718
1724
1719
1725
1720 def _parsedrev(symbol):
1726 def _parsedrev(symbol):
1721 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1727 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1722 if symbol.startswith(b'D') and symbol[1:].isdigit():
1728 if symbol.startswith(b'D') and symbol[1:].isdigit():
1723 return int(symbol[1:])
1729 return int(symbol[1:])
1724 if symbol.isdigit():
1730 if symbol.isdigit():
1725 return int(symbol)
1731 return int(symbol)
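
The docstring examples can be checked directly:

    assert _parsedrev(b'D45') == 45
    assert _parsedrev(b'12') == 12
    assert _parsedrev(b'x') is None  # neither branch matches, so it falls through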
1726
1732
1727
1733
1728 def _prefetchdrevs(tree):
1734 def _prefetchdrevs(tree):
1729 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1735 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1730 drevs = set()
1736 drevs = set()
1731 ancestordrevs = set()
1737 ancestordrevs = set()
1732 op = tree[0]
1738 op = tree[0]
1733 if op == b'symbol':
1739 if op == b'symbol':
1734 r = _parsedrev(tree[1])
1740 r = _parsedrev(tree[1])
1735 if r:
1741 if r:
1736 drevs.add(r)
1742 drevs.add(r)
1737 elif op == b'ancestors':
1743 elif op == b'ancestors':
1738 r, a = _prefetchdrevs(tree[1])
1744 r, a = _prefetchdrevs(tree[1])
1739 drevs.update(r)
1745 drevs.update(r)
1740 ancestordrevs.update(r)
1746 ancestordrevs.update(r)
1741 ancestordrevs.update(a)
1747 ancestordrevs.update(a)
1742 else:
1748 else:
1743 for t in tree[1:]:
1749 for t in tree[1:]:
1744 r, a = _prefetchdrevs(t)
1750 r, a = _prefetchdrevs(t)
1745 drevs.update(r)
1751 drevs.update(r)
1746 ancestordrevs.update(a)
1752 ancestordrevs.update(a)
1747 return drevs, ancestordrevs
1753 return drevs, ancestordrevs
1748
1754
1749
1755
1750 def querydrev(ui, spec):
1756 def querydrev(ui, spec):
1751 """return a list of "Differential Revision" dicts
1757 """return a list of "Differential Revision" dicts
1752
1758
1753 spec is a string using a simple query language, see docstring in phabread
1759 spec is a string using a simple query language, see docstring in phabread
1754 for details.
1760 for details.
1755
1761
1756 A "Differential Revision dict" looks like:
1762 A "Differential Revision dict" looks like:
1757
1763
1758 {
1764 {
1759 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1765 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1760 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1766 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1761 "auxiliary": {
1767 "auxiliary": {
1762 "phabricator:depends-on": [
1768 "phabricator:depends-on": [
1763 "PHID-DREV-gbapp366kutjebt7agcd"
1769 "PHID-DREV-gbapp366kutjebt7agcd"
1764 ]
1770 ]
1765 "phabricator:projects": [],
1771 "phabricator:projects": [],
1766 },
1772 },
1767 "branch": "default",
1773 "branch": "default",
1768 "ccs": [],
1774 "ccs": [],
1769 "commits": [],
1775 "commits": [],
1770 "dateCreated": "1499181406",
1776 "dateCreated": "1499181406",
1771 "dateModified": "1499182103",
1777 "dateModified": "1499182103",
1772 "diffs": [
1778 "diffs": [
1773 "3",
1779 "3",
1774 "4",
1780 "4",
1775 ],
1781 ],
1776 "hashes": [],
1782 "hashes": [],
1777 "id": "2",
1783 "id": "2",
1778 "lineCount": "2",
1784 "lineCount": "2",
1779 "phid": "PHID-DREV-672qvysjcczopag46qty",
1785 "phid": "PHID-DREV-672qvysjcczopag46qty",
1780 "properties": {},
1786 "properties": {},
1781 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1787 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1782 "reviewers": [],
1788 "reviewers": [],
1783 "sourcePath": null
1789 "sourcePath": null
1784 "status": "0",
1790 "status": "0",
1785 "statusName": "Needs Review",
1791 "statusName": "Needs Review",
1786 "summary": "",
1792 "summary": "",
1787 "testPlan": "",
1793 "testPlan": "",
1788 "title": "example",
1794 "title": "example",
1789 "uri": "https://phab.example.com/D2",
1795 "uri": "https://phab.example.com/D2",
1790 }
1796 }
1791 """
1797 """
1792 # TODO: replace differential.query and differential.querydiffs with
1798 # TODO: replace differential.query and differential.querydiffs with
1793 # differential.diff.search because the former (and their output) are
1799 # differential.diff.search because the former (and their output) are
1794 # frozen, and planned to be deprecated and removed.
1800 # frozen, and planned to be deprecated and removed.
1795
1801
1796 def fetch(params):
1802 def fetch(params):
1797 """params -> single drev or None"""
1803 """params -> single drev or None"""
1798 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1804 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1799 if key in prefetched:
1805 if key in prefetched:
1800 return prefetched[key]
1806 return prefetched[key]
1801 drevs = callconduit(ui, b'differential.query', params)
1807 drevs = callconduit(ui, b'differential.query', params)
1802 # Fill prefetched with the result
1808 # Fill prefetched with the result
1803 for drev in drevs:
1809 for drev in drevs:
1804 prefetched[drev[b'phid']] = drev
1810 prefetched[drev[b'phid']] = drev
1805 prefetched[int(drev[b'id'])] = drev
1811 prefetched[int(drev[b'id'])] = drev
1806 if key not in prefetched:
1812 if key not in prefetched:
1807 raise error.Abort(
1813 raise error.Abort(
1808 _(b'cannot get Differential Revision %r') % params
1814 _(b'cannot get Differential Revision %r') % params
1809 )
1815 )
1810 return prefetched[key]
1816 return prefetched[key]
1811
1817
1812 def getstack(topdrevids):
1818 def getstack(topdrevids):
1813 """given a top, get a stack from the bottom, [id] -> [id]"""
1819 """given a top, get a stack from the bottom, [id] -> [id]"""
1814 visited = set()
1820 visited = set()
1815 result = []
1821 result = []
1816 queue = [{b'ids': [i]} for i in topdrevids]
1822 queue = [{b'ids': [i]} for i in topdrevids]
1817 while queue:
1823 while queue:
1818 params = queue.pop()
1824 params = queue.pop()
1819 drev = fetch(params)
1825 drev = fetch(params)
1820 if drev[b'id'] in visited:
1826 if drev[b'id'] in visited:
1821 continue
1827 continue
1822 visited.add(drev[b'id'])
1828 visited.add(drev[b'id'])
1823 result.append(int(drev[b'id']))
1829 result.append(int(drev[b'id']))
1824 auxiliary = drev.get(b'auxiliary', {})
1830 auxiliary = drev.get(b'auxiliary', {})
1825 depends = auxiliary.get(b'phabricator:depends-on', [])
1831 depends = auxiliary.get(b'phabricator:depends-on', [])
1826 for phid in depends:
1832 for phid in depends:
1827 queue.append({b'phids': [phid]})
1833 queue.append({b'phids': [phid]})
1828 result.reverse()
1834 result.reverse()
1829 return smartset.baseset(result)
1835 return smartset.baseset(result)
1830
1836
1831 # Initialize prefetch cache
1837 # Initialize prefetch cache
1832 prefetched = {} # {id or phid: drev}
1838 prefetched = {} # {id or phid: drev}
1833
1839
1834 tree = _parse(spec)
1840 tree = _parse(spec)
1835 drevs, ancestordrevs = _prefetchdrevs(tree)
1841 drevs, ancestordrevs = _prefetchdrevs(tree)
1836
1842
1837 # developer config: phabricator.batchsize
1843 # developer config: phabricator.batchsize
1838 batchsize = ui.configint(b'phabricator', b'batchsize')
1844 batchsize = ui.configint(b'phabricator', b'batchsize')
1839
1845
1840 # Prefetch Differential Revisions in batch
1846 # Prefetch Differential Revisions in batch
1841 tofetch = set(drevs)
1847 tofetch = set(drevs)
1842 for r in ancestordrevs:
1848 for r in ancestordrevs:
1843 tofetch.update(range(max(1, r - batchsize), r + 1))
1849 tofetch.update(range(max(1, r - batchsize), r + 1))
1844 if drevs:
1850 if drevs:
1845 fetch({b'ids': list(tofetch)})
1851 fetch({b'ids': list(tofetch)})
1846 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1852 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1847
1853
1848 # Walk through the tree, return smartsets
1854 # Walk through the tree, return smartsets
1849 def walk(tree):
1855 def walk(tree):
1850 op = tree[0]
1856 op = tree[0]
1851 if op == b'symbol':
1857 if op == b'symbol':
1852 drev = _parsedrev(tree[1])
1858 drev = _parsedrev(tree[1])
1853 if drev:
1859 if drev:
1854 return smartset.baseset([drev])
1860 return smartset.baseset([drev])
1855 elif tree[1] in _knownstatusnames:
1861 elif tree[1] in _knownstatusnames:
1856 drevs = [
1862 drevs = [
1857 r
1863 r
1858 for r in validids
1864 for r in validids
1859 if _getstatusname(prefetched[r]) == tree[1]
1865 if _getstatusname(prefetched[r]) == tree[1]
1860 ]
1866 ]
1861 return smartset.baseset(drevs)
1867 return smartset.baseset(drevs)
1862 else:
1868 else:
1863 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1869 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1864 elif op in {b'and_', b'add', b'sub'}:
1870 elif op in {b'and_', b'add', b'sub'}:
1865 assert len(tree) == 3
1871 assert len(tree) == 3
1866 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1872 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1867 elif op == b'group':
1873 elif op == b'group':
1868 return walk(tree[1])
1874 return walk(tree[1])
1869 elif op == b'ancestors':
1875 elif op == b'ancestors':
1870 return getstack(walk(tree[1]))
1876 return getstack(walk(tree[1]))
1871 else:
1877 else:
1872 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1878 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1873
1879
1874 return [prefetched[r] for r in walk(tree)]
1880 return [prefetched[r] for r in walk(tree)]
1875
1881
1876
1882
1877 def getdescfromdrev(drev):
1883 def getdescfromdrev(drev):
1878 """get description (commit message) from "Differential Revision"
1884 """get description (commit message) from "Differential Revision"
1879
1885
1880 This is similar to the differential.getcommitmessage API, but we only care
1886 This is similar to the differential.getcommitmessage API, but we only care
1881 about a limited set of fields: title, summary, test plan, and URL.
1887 about a limited set of fields: title, summary, test plan, and URL.
1882 """
1888 """
1883 title = drev[b'title']
1889 title = drev[b'title']
1884 summary = drev[b'summary'].rstrip()
1890 summary = drev[b'summary'].rstrip()
1885 testplan = drev[b'testPlan'].rstrip()
1891 testplan = drev[b'testPlan'].rstrip()
1886 if testplan:
1892 if testplan:
1887 testplan = b'Test Plan:\n%s' % testplan
1893 testplan = b'Test Plan:\n%s' % testplan
1888 uri = b'Differential Revision: %s' % drev[b'uri']
1894 uri = b'Differential Revision: %s' % drev[b'uri']
1889 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1895 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
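
Applied to the sample Differential Revision shown in querydrev()'s docstring (title "example", empty summary and test plan), the assembly above gives:

    drev = {
        b'title': b'example',
        b'summary': b'',
        b'testPlan': b'',
        b'uri': b'https://phab.example.com/D2',
    }
    assert getdescfromdrev(drev) == (
        b'example\n\nDifferential Revision: https://phab.example.com/D2'
    )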
1890
1896
1891
1897
1892 def get_amended_desc(drev, ctx, folded):
1898 def get_amended_desc(drev, ctx, folded):
1893 """similar to ``getdescfromdrev``, but supports a folded series of commits
1899 """similar to ``getdescfromdrev``, but supports a folded series of commits
1894
1900
1895 This is used when determining if an individual commit needs to have its
1901 This is used when determining if an individual commit needs to have its
1896 message amended after posting it for review. The determination is made for
1902 message amended after posting it for review. The determination is made for
1897 each individual commit, even when they were folded into one review.
1903 each individual commit, even when they were folded into one review.
1898 """
1904 """
1899 if not folded:
1905 if not folded:
1900 return getdescfromdrev(drev)
1906 return getdescfromdrev(drev)
1901
1907
1902 uri = b'Differential Revision: %s' % drev[b'uri']
1908 uri = b'Differential Revision: %s' % drev[b'uri']
1903
1909
1904 # Since the commit messages were combined when posting multiple commits
1910 # Since the commit messages were combined when posting multiple commits
1905 # with --fold, the fields can't be read from Phabricator here, or *all*
1911 # with --fold, the fields can't be read from Phabricator here, or *all*
1906 # affected local revisions will end up with the same commit message after
1912 # affected local revisions will end up with the same commit message after
1907 # the URI is amended in. Append the DREV line, or update it if it
1913 # the URI is amended in. Append the DREV line, or update it if it
1908 # exists. At worst, this means commit message or test plan updates on
1914 # exists. At worst, this means commit message or test plan updates on
1909 # Phabricator aren't propagated back to the repository, but that seems
1915 # Phabricator aren't propagated back to the repository, but that seems
1910 # reasonable for the case where local commits are effectively combined
1916 # reasonable for the case where local commits are effectively combined
1911 # in Phabricator.
1917 # in Phabricator.
1912 m = _differentialrevisiondescre.search(ctx.description())
1918 m = _differentialrevisiondescre.search(ctx.description())
1913 if not m:
1919 if not m:
1914 return b'\n\n'.join([ctx.description(), uri])
1920 return b'\n\n'.join([ctx.description(), uri])
1915
1921
1916 return _differentialrevisiondescre.sub(uri, ctx.description())
1922 return _differentialrevisiondescre.sub(uri, ctx.description())
1917
1923
1918
1924
1919 def getlocalcommits(diff):
1925 def getlocalcommits(diff):
1920 """get the set of local commits from a diff object
1926 """get the set of local commits from a diff object
1921
1927
1922 See ``getdiffmeta()`` for an example diff object.
1928 See ``getdiffmeta()`` for an example diff object.
1923 """
1929 """
1924 props = diff.get(b'properties') or {}
1930 props = diff.get(b'properties') or {}
1925 commits = props.get(b'local:commits') or {}
1931 commits = props.get(b'local:commits') or {}
1926 if len(commits) > 1:
1932 if len(commits) > 1:
1927 return {bin(c) for c in commits.keys()}
1933 return {bin(c) for c in commits.keys()}
1928
1934
1929 # Storing the diff metadata predates storing `local:commits`, so continue
1935 # Storing the diff metadata predates storing `local:commits`, so continue
1930 # to use that in the --no-fold case.
1936 # to use that in the --no-fold case.
1931 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1937 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1932
1938
1933
1939
1934 def getdiffmeta(diff):
1940 def getdiffmeta(diff):
1935 """get commit metadata (date, node, user, p1) from a diff object
1941 """get commit metadata (date, node, user, p1) from a diff object
1936
1942
1937 The metadata could be "hg:meta", sent by phabsend, like:
1943 The metadata could be "hg:meta", sent by phabsend, like:
1938
1944
1939 "properties": {
1945 "properties": {
1940 "hg:meta": {
1946 "hg:meta": {
1941 "branch": "default",
1947 "branch": "default",
1942 "date": "1499571514 25200",
1948 "date": "1499571514 25200",
1943 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1949 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1944 "user": "Foo Bar <foo@example.com>",
1950 "user": "Foo Bar <foo@example.com>",
1945 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1951 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1946 }
1952 }
1947 }
1953 }
1948
1954
1949 Or converted from "local:commits", sent by "arc", like:
1955 Or converted from "local:commits", sent by "arc", like:
1950
1956
1951 "properties": {
1957 "properties": {
1952 "local:commits": {
1958 "local:commits": {
1953 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1959 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1954 "author": "Foo Bar",
1960 "author": "Foo Bar",
1955 "authorEmail": "foo@example.com"
1961 "authorEmail": "foo@example.com"
1956 "branch": "default",
1962 "branch": "default",
1957 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1963 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1958 "local": "1000",
1964 "local": "1000",
1959 "message": "...",
1965 "message": "...",
1960 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1966 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1961 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1967 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1962 "summary": "...",
1968 "summary": "...",
1963 "tag": "",
1969 "tag": "",
1964 "time": 1499546314,
1970 "time": 1499546314,
1965 }
1971 }
1966 }
1972 }
1967 }
1973 }
1968
1974
1969 Note: metadata extracted from "local:commits" will lose time zone
1975 Note: metadata extracted from "local:commits" will lose time zone
1970 information.
1976 information.
1971 """
1977 """
1972 props = diff.get(b'properties') or {}
1978 props = diff.get(b'properties') or {}
1973 meta = props.get(b'hg:meta')
1979 meta = props.get(b'hg:meta')
1974 if not meta:
1980 if not meta:
1975 if props.get(b'local:commits'):
1981 if props.get(b'local:commits'):
1976 commit = sorted(props[b'local:commits'].values())[0]
1982 commit = sorted(props[b'local:commits'].values())[0]
1977 meta = {}
1983 meta = {}
1978 if b'author' in commit and b'authorEmail' in commit:
1984 if b'author' in commit and b'authorEmail' in commit:
1979 meta[b'user'] = b'%s <%s>' % (
1985 meta[b'user'] = b'%s <%s>' % (
1980 commit[b'author'],
1986 commit[b'author'],
1981 commit[b'authorEmail'],
1987 commit[b'authorEmail'],
1982 )
1988 )
1983 if b'time' in commit:
1989 if b'time' in commit:
1984 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1990 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1985 if b'branch' in commit:
1991 if b'branch' in commit:
1986 meta[b'branch'] = commit[b'branch']
1992 meta[b'branch'] = commit[b'branch']
1987 node = commit.get(b'commit', commit.get(b'rev'))
1993 node = commit.get(b'commit', commit.get(b'rev'))
1988 if node:
1994 if node:
1989 meta[b'node'] = node
1995 meta[b'node'] = node
1990 if len(commit.get(b'parents', ())) >= 1:
1996 if len(commit.get(b'parents', ())) >= 1:
1991 meta[b'parent'] = commit[b'parents'][0]
1997 meta[b'parent'] = commit[b'parents'][0]
1992 else:
1998 else:
1993 meta = {}
1999 meta = {}
1994 if b'date' not in meta and b'dateCreated' in diff:
2000 if b'date' not in meta and b'dateCreated' in diff:
1995 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
2001 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1996 if b'branch' not in meta and diff.get(b'branch'):
2002 if b'branch' not in meta and diff.get(b'branch'):
1997 meta[b'branch'] = diff[b'branch']
2003 meta[b'branch'] = diff[b'branch']
1998 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
2004 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1999 meta[b'parent'] = diff[b'sourceControlBaseRevision']
2005 meta[b'parent'] = diff[b'sourceControlBaseRevision']
2000 return meta
2006 return meta
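
For the arc-style "local:commits" example in the docstring above (trimmed here to the fields the function actually reads), the extraction yields:

    diff = {
        b'properties': {
            b'local:commits': {
                b'98c08acae292b2faf60a279b4189beb6cff1414d': {
                    b'author': b'Foo Bar',
                    b'authorEmail': b'foo@example.com',
                    b'branch': b'default',
                    b'commit': b'98c08acae292b2faf60a279b4189beb6cff1414d',
                    b'parents': [b'6d0abad76b30e4724a37ab8721d630394070fe16'],
                    b'time': 1499546314,
                },
            },
        },
    }
    assert getdiffmeta(diff) == {
        b'user': b'Foo Bar <foo@example.com>',
        b'date': b'1499546314 0',
        b'branch': b'default',
        b'node': b'98c08acae292b2faf60a279b4189beb6cff1414d',
        b'parent': b'6d0abad76b30e4724a37ab8721d630394070fe16',
    }

Note how the time zone is dropped ('1499546314 0'), as the docstring warns.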
2001
2007
2002
2008
2003 def _getdrevs(ui, stack, specs):
2009 def _getdrevs(ui, stack, specs):
2004 """convert user supplied DREVSPECs into "Differential Revision" dicts
2010 """convert user supplied DREVSPECs into "Differential Revision" dicts
2005
2011
2006 See ``hg help phabread`` for how to specify each DREVSPEC.
2012 See ``hg help phabread`` for how to specify each DREVSPEC.
2007 """
2013 """
2008 if len(specs) > 0:
2014 if len(specs) > 0:
2009
2015
2010 def _formatspec(s):
2016 def _formatspec(s):
2011 if stack:
2017 if stack:
2012 s = b':(%s)' % s
2018 s = b':(%s)' % s
2013 return b'(%s)' % s
2019 return b'(%s)' % s
2014
2020
2015 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
2021 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
2016
2022
2017 drevs = querydrev(ui, spec)
2023 drevs = querydrev(ui, spec)
2018 if drevs:
2024 if drevs:
2019 return drevs
2025 return drevs
2020
2026
2021 raise error.Abort(_(b"empty DREVSPEC set"))
2027 raise error.Abort(_(b"empty DREVSPEC set"))
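
The inner _formatspec() above simply parenthesizes each user-supplied DREVSPEC, prefixing ``:`` when --stack is given, and the pieces are joined with ``+``. A standalone mirror of that logic (the name _assemble is made up here for illustration):

    def _assemble(specs, stack):
        # mirrors the _formatspec()/join logic of _getdrevs() above
        def fmt(s):
            if stack:
                s = b':(%s)' % s
            return b'(%s)' % s
        return b'+'.join(fmt(s) for s in specs)

    assert _assemble([b'D23', b'45'], stack=True) == b'(:(D23))+(:(45))'
    assert _assemble([b'D23', b'45'], stack=False) == b'(D23)+(45)'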
2022
2028
2023
2029
2024 def readpatch(ui, drevs, write):
2030 def readpatch(ui, drevs, write):
2025 """generate plain-text patch readable by 'hg import'
2031 """generate plain-text patch readable by 'hg import'
2026
2032
2027 write takes a list of (DREV, bytes), where DREV is the differential number
2033 write takes a list of (DREV, bytes), where DREV is the differential number
2028 (as bytes, without the "D" prefix) and the bytes are the text of a patch
2034 (as bytes, without the "D" prefix) and the bytes are the text of a patch
2029 to be imported. drevs is what "querydrev" returns, results of
2035 to be imported. drevs is what "querydrev" returns, results of
2030 "differential.query".
2036 "differential.query".
2031 """
2037 """
2032 # Prefetch hg:meta property for all diffs
2038 # Prefetch hg:meta property for all diffs
2033 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
2039 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
2034 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
2040 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
2035
2041
2036 patches = []
2042 patches = []
2037
2043
2038 # Generate patch for each drev
2044 # Generate patch for each drev
2039 for drev in drevs:
2045 for drev in drevs:
2040 ui.note(_(b'reading D%s\n') % drev[b'id'])
2046 ui.note(_(b'reading D%s\n') % drev[b'id'])
2041
2047
2042 diffid = max(int(v) for v in drev[b'diffs'])
2048 diffid = max(int(v) for v in drev[b'diffs'])
2043 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
2049 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
2044 desc = getdescfromdrev(drev)
2050 desc = getdescfromdrev(drev)
2045 header = b'# HG changeset patch\n'
2051 header = b'# HG changeset patch\n'
2046
2052
2047 # Try to preserve metadata from hg:meta property. Write hg patch
2053 # Try to preserve metadata from hg:meta property. Write hg patch
2048 # headers that can be read by the "import" command. See patchheadermap
2054 # headers that can be read by the "import" command. See patchheadermap
2049 # and extract in mercurial/patch.py for supported headers.
2055 # and extract in mercurial/patch.py for supported headers.
2050 meta = getdiffmeta(diffs[b'%d' % diffid])
2056 meta = getdiffmeta(diffs[b'%d' % diffid])
2051 for k in _metanamemap.keys():
2057 for k in _metanamemap.keys():
2052 if k in meta:
2058 if k in meta:
2053 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
2059 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
2054
2060
2055 content = b'%s%s\n%s' % (header, desc, body)
2061 content = b'%s%s\n%s' % (header, desc, body)
2056 patches.append((drev[b'id'], content))
2062 patches.append((drev[b'id'], content))
2057
2063
2058 # Write patches to the supplied callback
2064 # Write patches to the supplied callback
2059 write(patches)
2065 write(patches)
2060
2066
2061
2067
2062 @vcrcommand(
2068 @vcrcommand(
2063 b'phabread',
2069 b'phabread',
2064 [(b'', b'stack', False, _(b'read dependencies'))],
2070 [(b'', b'stack', False, _(b'read dependencies'))],
2065 _(b'DREVSPEC... [OPTIONS]'),
2071 _(b'DREVSPEC... [OPTIONS]'),
2066 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2072 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2067 optionalrepo=True,
2073 optionalrepo=True,
2068 )
2074 )
2069 def phabread(ui, repo, *specs, **opts):
2075 def phabread(ui, repo, *specs, **opts):
2070 """print patches from Phabricator suitable for importing
2076 """print patches from Phabricator suitable for importing
2071
2077
2072 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2078 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2073 the number ``123``. It could also have common operators like ``+``, ``-``,
2079 the number ``123``. It could also have common operators like ``+``, ``-``,
2074 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2080 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2075 select a stack. If multiple DREVSPEC values are given, the result is the
2081 select a stack. If multiple DREVSPEC values are given, the result is the
2076 union of each individually evaluated value. No attempt is currently made
2082 union of each individually evaluated value. No attempt is currently made
2077 to reorder the values to run from parent to child.
2083 to reorder the values to run from parent to child.
2078
2084
2079 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2085 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2080 could be used to filter patches by status. For performance reasons, they
2086 could be used to filter patches by status. For performance reasons, they
2081 only represent a subset of non-status selections and cannot be used alone.
2087 only represent a subset of non-status selections and cannot be used alone.
2082
2088
2083 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
2089 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and excludes
2084 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2090 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2085 stack up to D9.
2091 stack up to D9.
2086
2092
2087 If --stack is given, follow dependency information and read all patches.
2093 If --stack is given, follow dependency information and read all patches.
2088 It is equivalent to the ``:`` operator.
2094 It is equivalent to the ``:`` operator.
2089 """
2095 """
2090 opts = pycompat.byteskwargs(opts)
2096 opts = pycompat.byteskwargs(opts)
2091 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2097 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2092
2098
2093 def _write(patches):
2099 def _write(patches):
2094 for drev, content in patches:
2100 for drev, content in patches:
2095 ui.write(content)
2101 ui.write(content)
2096
2102
2097 readpatch(ui, drevs, _write)
2103 readpatch(ui, drevs, _write)
2098
2104
2099
2105
2100 @vcrcommand(
2106 @vcrcommand(
2101 b'phabimport',
2107 b'phabimport',
2108 [(b'', b'stack', False, _(b'import dependencies as well'))],
2109 _(b'DREVSPEC... [OPTIONS]'),
2110 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2111 )
2112 def phabimport(ui, repo, *specs, **opts):
2113 """import patches from Phabricator for the specified Differential Revisions
2114
2115 The patches are read and applied starting at the parent of the working
2116 directory.
2117
2118 See ``hg help phabread`` for how to specify DREVSPEC.
2119 """
2120 opts = pycompat.byteskwargs(opts)
2121
2122 # --bypass avoids losing exec and symlink bits when importing on Windows,
2123 # and allows importing with a dirty wdir. It also aborts instead of leaving
2124 # rejects.
2125 opts[b'bypass'] = True
2126
2127 # Mandatory default values, synced with commands.import
2128 opts[b'strip'] = 1
2129 opts[b'prefix'] = b''
2130 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2131 opts[b'obsolete'] = False
2132
2133 if ui.configbool(b'phabimport', b'secret'):
2134 opts[b'secret'] = True
2135 if ui.configbool(b'phabimport', b'obsolete'):
2136 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2137
2138 def _write(patches):
2139 parents = repo[None].parents()
2140
2141 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2142 for drev, contents in patches:
2143 ui.status(_(b'applying patch from D%s\n') % drev)
2144
2145 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2146 msg, node, rej = cmdutil.tryimportone(
2147 ui,
2148 repo,
2149 patchdata,
2150 parents,
2151 opts,
2152 [],
2153 None, # Never update wdir to another revision
2154 )
2155
2156 if not node:
2157 raise error.Abort(_(b'D%s: no diffs found') % drev)
2158
2159 ui.note(msg + b'\n')
2160 parents = [repo[node]]
2161
2162 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2163
2164 readpatch(repo.ui, drevs, _write)
2165
2166
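The body of phabimport above just collects a fixed set of import options and hands each patch to cmdutil.tryimportone(). A minimal sketch of those forced defaults, assuming plain str keys for readability (the extension itself uses byte-string keys and reads the two phabimport.* settings via ui.configbool()):

    # Illustrative only: mirrors the defaults forced by phabimport() above.
    def effective_import_opts(secret=False, obsolete=False):
        opts = {
            'bypass': True,        # keep exec/symlink bits on Windows, allow a dirty wdir
            'strip': 1,            # mandatory defaults, synced with commands.import
            'prefix': '',
            'obsolete': obsolete,  # phabimport.obsolete, handled by evolve
        }
        if secret:                 # phabimport.secret
            opts['secret'] = True
        return opts

    print(effective_import_opts(secret=True))
    # {'bypass': True, 'strip': 1, 'prefix': '', 'obsolete': False, 'secret': True}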
2167 @vcrcommand(
2168 b'phabupdate',
2169 [
2170 (b'', b'accept', False, _(b'accept revisions')),
2171 (b'', b'reject', False, _(b'reject revisions')),
2172 (b'', b'request-review', False, _(b'request review on revisions')),
2173 (b'', b'abandon', False, _(b'abandon revisions')),
2174 (b'', b'reclaim', False, _(b'reclaim revisions')),
2175 (b'', b'close', False, _(b'close revisions')),
2176 (b'', b'reopen', False, _(b'reopen revisions')),
2177 (b'', b'plan-changes', False, _(b'plan changes for revisions')),
2178 (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
2179 (b'', b'commandeer', False, _(b'commandeer revisions')),
2180 (b'm', b'comment', b'', _(b'comment on the last revision')),
2181 ],
2182 _(b'DREVSPEC... [OPTIONS]'),
2183 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2184 optionalrepo=True,
2185 )
2186 def phabupdate(ui, repo, *specs, **opts):
2187 """update Differential Revision in batch
2188
2189 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2190 """
2191 opts = pycompat.byteskwargs(opts)
2192 transactions = [
2193 b'abandon',
2194 b'accept',
2195 b'close',
2196 b'commandeer',
2197 b'plan-changes',
2198 b'reclaim',
2199 b'reject',
2200 b'reopen',
2201 b'request-review',
2202 b'resign',
2203 ]
2204 flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
2205 if len(flags) > 1:
2206 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2207
2208 actions = []
2209 for f in flags:
2210 actions.append({b'type': f, b'value': True})
2211
2212 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2213 for i, drev in enumerate(drevs):
2214 if i + 1 == len(drevs) and opts.get(b'comment'):
2215 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2216 if actions:
2217 params = {
2218 b'objectIdentifier': drev[b'phid'],
2219 b'transactions': actions,
2220 }
2221 callconduit(ui, b'differential.revision.edit', params)
2222
2223
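The phabupdate flag handling above turns the one selected action flag into a Conduit transaction, and appends a comment transaction only on the last revision of the set. A small self-contained sketch of that mapping, assuming plain strings and a made-up opts dict in place of the extension's parsed byte-string options:

    transactions = ['abandon', 'accept', 'close', 'commandeer', 'plan-changes',
                    'reclaim', 'reject', 'reopen', 'request-review', 'resign']
    opts = {'accept': True, 'comment': 'LGTM'}  # hypothetical parsed options
    flags = [n for n in transactions if opts.get(n.replace('-', '_'))]
    assert len(flags) <= 1  # the real command aborts when several are set
    actions = [{'type': f, 'value': True} for f in flags]
    if opts.get('comment'):
        actions.append({'type': 'comment', 'value': opts['comment']})
    print(actions)
    # [{'type': 'accept', 'value': True}, {'type': 'comment', 'value': 'LGTM'}]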
2224 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2225 def template_review(context, mapping):
2226 """:phabreview: Object describing the review for this changeset.
2227 Has attributes `url` and `id`.
2228 """
2229 ctx = context.resource(mapping, b'ctx')
2230 m = _differentialrevisiondescre.search(ctx.description())
2231 if m:
2232 return templateutil.hybriddict(
2233 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2234 )
2235 else:
2236 tags = ctx.repo().nodetags(ctx.node())
2237 for t in tags:
2238 if _differentialrevisiontagre.match(t):
2239 url = ctx.repo().ui.config(b'phabricator', b'url')
2240 if not url.endswith(b'/'):
2241 url += b'/'
2242 url += t
2243
2244 return templateutil.hybriddict({b'url': url, b'id': t,})
2245 return None
2246
2247
2248 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2249 def template_status(context, mapping):
2250 """:phabstatus: String. Status of Phabricator differential.
2251 """
2252 ctx = context.resource(mapping, b'ctx')
2253 repo = context.resource(mapping, b'repo')
2254 ui = context.resource(mapping, b'ui')
2255
2256 rev = ctx.rev()
2257 try:
2258 drevid = getdrevmap(repo, [rev])[rev]
2259 except KeyError:
2260 return None
2261 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2262 for drev in drevs:
2263 if int(drev[b'id']) == drevid:
2264 return templateutil.hybriddict(
2265 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2266 )
2267 return None
2268
2269
2270 @show.showview(b'phabstatus', csettopic=b'work')
2271 def phabstatusshowview(ui, repo, displayer):
2272 """Phabricator differential status"""
2273 revs = repo.revs('sort(_underway(), topo)')
2274 drevmap = getdrevmap(repo, revs)
2275 unknownrevs, drevids, revsbydrevid = [], set(), {}
2276 for rev, drevid in pycompat.iteritems(drevmap):
2277 if drevid is not None:
2278 drevids.add(drevid)
2279 revsbydrevid.setdefault(drevid, set()).add(rev)
2280 else:
2281 unknownrevs.append(rev)
2282
2283 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2284 drevsbyrev = {}
2285 for drev in drevs:
2286 for rev in revsbydrevid[int(drev[b'id'])]:
2287 drevsbyrev[rev] = drev
2288
2289 def phabstatus(ctx):
2290 drev = drevsbyrev[ctx.rev()]
2291 status = ui.label(
2292 b'%(statusName)s' % drev,
2293 b'phabricator.status.%s' % _getstatusname(drev),
2294 )
2295 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2296
2297 revs -= smartset.baseset(unknownrevs)
2298 revdag = graphmod.dagwalker(repo, revs)
2299
2300 ui.setconfig(b'experimental', b'graphshorten', True)
2301 displayer._exthook = phabstatus
2302 nodelen = show.longestshortest(repo, revs)
2303 logcmdutil.displaygraph(
2304 ui,
2305 repo,
2306 revdag,
2307 displayer,
2308 graphmod.asciiedges,
2309 props={b'nodelen': nodelen},
2310 )
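The show view above issues a single differential.query call by first grouping local revisions by their Differential id and setting aside the ones with no id. A toy version of that grouping step, with a hand-written drevmap standing in for getdrevmap(repo, revs):

    drevmap = {10: 8433, 11: 8433, 12: None, 13: 8434}  # rev -> drev id (or None)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in drevmap.items():
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)
    print(sorted(drevids))     # [8433, 8434] -> queried in one Conduit call
    print(revsbydrevid[8433])  # {10, 11}     -> both revs share one status line
    print(unknownrevs)         # [12]         -> excluded from the graph output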
@@ -1,974 +1,1006 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 >
5 >
6 > [auth]
6 > [auth]
7 > hgphab.schemes = https
7 > hgphab.schemes = https
8 > hgphab.prefix = phab.mercurial-scm.org
8 > hgphab.prefix = phab.mercurial-scm.org
9 > # When working on the extension and making phabricator interaction
9 > # When working on the extension and making phabricator interaction
10 > # changes, edit this to be a real phabricator token. When done, edit
10 > # changes, edit this to be a real phabricator token. When done, edit
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
12 > # token with this value.
12 > # token with this value.
13 > hgphab.phabtoken = cli-hahayouwish
13 > hgphab.phabtoken = cli-hahayouwish
14 >
14 >
15 > [phabricator]
15 > [phabricator]
16 > debug = True
16 > debug = True
17 > EOF
17 > EOF
18 $ hg init repo
18 $ hg init repo
19 $ cd repo
19 $ cd repo
20 $ cat >> .hg/hgrc <<EOF
20 $ cat >> .hg/hgrc <<EOF
21 > [phabricator]
21 > [phabricator]
22 > url = https://phab.mercurial-scm.org/
22 > url = https://phab.mercurial-scm.org/
23 > callsign = HG
23 > callsign = HG
24 > EOF
24 > EOF
25 $ VCR="$TESTDIR/phabricator"
25 $ VCR="$TESTDIR/phabricator"
26
26
27 debugcallconduit doesn't claim invalid arguments without --test-vcr:
27 debugcallconduit doesn't claim invalid arguments without --test-vcr:
28 $ echo '{}' | HGRCSKIPREPO= hg debugcallconduit 'conduit.ping'
28 $ echo '{}' | HGRCSKIPREPO= hg debugcallconduit 'conduit.ping'
29 abort: config phabricator.url is required
29 abort: config phabricator.url is required
30 [255]
30 [255]
31
31
32 Error is handled reasonably. We override the phabtoken here so that
32 Error is handled reasonably. We override the phabtoken here so that
33 when you're developing changes to phabricator.py you can edit the
33 when you're developing changes to phabricator.py you can edit the
34 above config and have a real token in the test but not have to edit
34 above config and have a real token in the test but not have to edit
35 this test.
35 this test.
36 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
36 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
37 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
37 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
38 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
38 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
39
39
40 Missing arguments don't crash, and may print the command help
40 Missing arguments don't crash, and may print the command help
41
41
42 $ hg debugcallconduit
42 $ hg debugcallconduit
43 hg debugcallconduit: invalid arguments
43 hg debugcallconduit: invalid arguments
44 hg debugcallconduit METHOD
44 hg debugcallconduit METHOD
45
45
46 call Conduit API
46 call Conduit API
47
47
48 options:
48 options:
49
49
50 (use 'hg debugcallconduit -h' to show more help)
50 (use 'hg debugcallconduit -h' to show more help)
51 [255]
51 [255]
52 $ hg phabread
52 $ hg phabread
53 abort: empty DREVSPEC set
53 abort: empty DREVSPEC set
54 [255]
54 [255]
55
55
56 Basic phabread:
56 Basic phabread:
57 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
57 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
58 # HG changeset patch
58 # HG changeset patch
59 # Date 1536771503 0
59 # Date 1536771503 0
60 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
60 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
61 exchangev2: start to implement pull with wire protocol v2
61 exchangev2: start to implement pull with wire protocol v2
62
62
63 Wire protocol version 2 will take a substantially different
63 Wire protocol version 2 will take a substantially different
64 approach to exchange than version 1 (at least as far as pulling
64 approach to exchange than version 1 (at least as far as pulling
65 is concerned).
65 is concerned).
66
66
67 This commit establishes a new exchangev2 module for holding
67 This commit establishes a new exchangev2 module for holding
68
68
69 Phabread with multiple DREVSPEC
69 Phabread with multiple DREVSPEC
70
70
71 TODO: attempt to order related revisions like --stack?
71 TODO: attempt to order related revisions like --stack?
72 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
72 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
73 > | grep '^Differential Revision'
73 > | grep '^Differential Revision'
74 Differential Revision: https://phab.mercurial-scm.org/D8205
74 Differential Revision: https://phab.mercurial-scm.org/D8205
75 Differential Revision: https://phab.mercurial-scm.org/D8206
75 Differential Revision: https://phab.mercurial-scm.org/D8206
76 Differential Revision: https://phab.mercurial-scm.org/D8207
76 Differential Revision: https://phab.mercurial-scm.org/D8207
77
77
78 Empty DREVSPECs don't crash
78 Empty DREVSPECs don't crash
79
79
80 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
80 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
81 abort: empty DREVSPEC set
81 abort: empty DREVSPEC set
82 [255]
82 [255]
83
83
84
84
85 phabupdate with an accept:
85 phabupdate with an accept:
86 $ hg phabupdate --accept D4564 \
86 $ hg phabupdate --accept D4564 \
87 > -m 'I think I like where this is headed. Will read rest of series later.'\
87 > -m 'I think I like where this is headed. Will read rest of series later.'\
88 > --test-vcr "$VCR/accept-4564.json"
88 > --test-vcr "$VCR/accept-4564.json"
89 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
89 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
90 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
90 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
91 [255]
91 [255]
92 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
92 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
93
93
94 phabupdate with --plan-changes:
94 phabupdate with --plan-changes:
95
95
96 $ hg phabupdate --plan-changes D6876 --test-vcr "$VCR/phabupdate-change-6876.json"
96 $ hg phabupdate --plan-changes D6876 --test-vcr "$VCR/phabupdate-change-6876.json"
97
97
98 Create a differential diff:
98 Create a differential diff:
99 $ HGENCODING=utf-8; export HGENCODING
99 $ HGENCODING=utf-8; export HGENCODING
100 $ echo alpha > alpha
100 $ echo alpha > alpha
101 $ hg ci --addremove -m 'create alpha for phabricator test €'
101 $ hg ci --addremove -m 'create alpha for phabricator test €'
102 adding alpha
102 adding alpha
103 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
103 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
104 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
104 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
105 new commits: ['347bf67801e5']
105 new commits: ['347bf67801e5']
106 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
106 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
107 $ echo more >> alpha
107 $ echo more >> alpha
108 $ HGEDITOR=true hg ci --amend
108 $ HGEDITOR=true hg ci --amend
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
110 $ echo beta > beta
110 $ echo beta > beta
111 $ hg ci --addremove -m 'create beta for phabricator test'
111 $ hg ci --addremove -m 'create beta for phabricator test'
112 adding beta
112 adding beta
113 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
113 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
114 c44b38f24a45 mapped to old nodes []
114 c44b38f24a45 mapped to old nodes []
115 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
115 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
116 D7916 - created - 9e6901f21d5b: create beta for phabricator test
116 D7916 - created - 9e6901f21d5b: create beta for phabricator test
117 new commits: ['a692622e6937']
117 new commits: ['a692622e6937']
118 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
118 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
119 $ unset HGENCODING
119 $ unset HGENCODING
120
120
121 The amend won't explode after posting a public commit. The local tag is left
121 The amend won't explode after posting a public commit. The local tag is left
122 behind to identify it.
122 behind to identify it.
123
123
124 $ echo 'public change' > beta
124 $ echo 'public change' > beta
125 $ hg ci -m 'create public change for phabricator testing'
125 $ hg ci -m 'create public change for phabricator testing'
126 $ hg phase --public .
126 $ hg phase --public .
127 $ echo 'draft change' > alpha
127 $ echo 'draft change' > alpha
128 $ hg ci -m 'create draft change for phabricator testing'
128 $ hg ci -m 'create draft change for phabricator testing'
129 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
129 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
130 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
130 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
131 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
131 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
132 warning: not updating public commit 2:7b4185ab5d16
132 warning: not updating public commit 2:7b4185ab5d16
133 new commits: ['3244dc4a3334']
133 new commits: ['3244dc4a3334']
134 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
134 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
135 $ hg tags -v
135 $ hg tags -v
136 tip 3:3244dc4a3334
136 tip 3:3244dc4a3334
137 D7917 2:7b4185ab5d16 local
137 D7917 2:7b4185ab5d16 local
138
138
139 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
139 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
140 > {
140 > {
141 > "constraints": {
141 > "constraints": {
142 > "isBot": true
142 > "isBot": true
143 > }
143 > }
144 > }
144 > }
145 > EOF
145 > EOF
146 {
146 {
147 "cursor": {
147 "cursor": {
148 "after": null,
148 "after": null,
149 "before": null,
149 "before": null,
150 "limit": 100,
150 "limit": 100,
151 "order": null
151 "order": null
152 },
152 },
153 "data": [],
153 "data": [],
154 "maps": {},
154 "maps": {},
155 "query": {
155 "query": {
156 "queryKey": null
156 "queryKey": null
157 }
157 }
158 }
158 }
159
159
160 Template keywords
160 Template keywords
161 $ hg log -T'{rev} {phabreview|json}\n'
161 $ hg log -T'{rev} {phabreview|json}\n'
162 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
162 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
163 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
163 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
164 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
164 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
165 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
165 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
166
166
167 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
167 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
168 3 https://phab.mercurial-scm.org/D7918 D7918
168 3 https://phab.mercurial-scm.org/D7918 D7918
169 2 https://phab.mercurial-scm.org/D7917 D7917
169 2 https://phab.mercurial-scm.org/D7917 D7917
170 1 https://phab.mercurial-scm.org/D7916 D7916
170 1 https://phab.mercurial-scm.org/D7916 D7916
171 0 https://phab.mercurial-scm.org/D7915 D7915
171 0 https://phab.mercurial-scm.org/D7915 D7915
172
172
173 Commenting when phabsending:
173 Commenting when phabsending:
174 $ echo comment > comment
174 $ echo comment > comment
175 $ hg ci --addremove -m "create comment for phabricator test"
175 $ hg ci --addremove -m "create comment for phabricator test"
176 adding comment
176 adding comment
177 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
177 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
178 D7919 - created - d5dddca9023d: create comment for phabricator test
178 D7919 - created - d5dddca9023d: create comment for phabricator test
179 new commits: ['f7db812bbe1d']
179 new commits: ['f7db812bbe1d']
180 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
180 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
181 $ echo comment2 >> comment
181 $ echo comment2 >> comment
182 $ hg ci --amend
182 $ hg ci --amend
183 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
183 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
184 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
184 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
185 1849d7828727 mapped to old nodes []
185 1849d7828727 mapped to old nodes []
186 D7919 - updated - 1849d7828727: create comment for phabricator test
186 D7919 - updated - 1849d7828727: create comment for phabricator test
187
187
188 Phabsending a skipped commit:
188 Phabsending a skipped commit:
189 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
189 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
190 1849d7828727 mapped to old nodes ['1849d7828727']
190 1849d7828727 mapped to old nodes ['1849d7828727']
191 D7919 - skipped - 1849d7828727: create comment for phabricator test
191 D7919 - skipped - 1849d7828727: create comment for phabricator test
192
192
193 Phabsend doesn't create an instability when restacking existing revisions on top
193 Phabsend doesn't create an instability when restacking existing revisions on top
194 of new revisions.
194 of new revisions.
195
195
196 $ hg init reorder
196 $ hg init reorder
197 $ cd reorder
197 $ cd reorder
198 $ cat >> .hg/hgrc <<EOF
198 $ cat >> .hg/hgrc <<EOF
199 > [phabricator]
199 > [phabricator]
200 > url = https://phab.mercurial-scm.org/
200 > url = https://phab.mercurial-scm.org/
201 > callsign = HG
201 > callsign = HG
202 > [experimental]
202 > [experimental]
203 > evolution = all
203 > evolution = all
204 > EOF
204 > EOF
205
205
206 $ echo "add" > file1.txt
206 $ echo "add" > file1.txt
207 $ hg ci -Aqm 'added'
207 $ hg ci -Aqm 'added'
208 $ echo "mod1" > file1.txt
208 $ echo "mod1" > file1.txt
209 $ hg ci -m 'modified 1'
209 $ hg ci -m 'modified 1'
210 $ echo "mod2" > file1.txt
210 $ echo "mod2" > file1.txt
211 $ hg ci -m 'modified 2'
211 $ hg ci -m 'modified 2'
212 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
212 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
213 D8433 - created - 5d3959e20d1d: modified 2
213 D8433 - created - 5d3959e20d1d: modified 2
214 new commits: ['2b4aa8a88d61']
214 new commits: ['2b4aa8a88d61']
215 $ hg log -G -T compact
215 $ hg log -G -T compact
216 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
216 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
217 | modified 2
217 | modified 2
218 |
218 |
219 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
219 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
220 | modified 1
220 | modified 1
221 |
221 |
222 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
222 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
223 added
223 added
224
224
225 Also check that it doesn't create more orphans outside of the stack
225 Also check that it doesn't create more orphans outside of the stack
226
226
227 $ hg up -q 1
227 $ hg up -q 1
228 $ echo "mod3" > file1.txt
228 $ echo "mod3" > file1.txt
229 $ hg ci -m 'modified 3'
229 $ hg ci -m 'modified 3'
230 created new head
230 created new head
231 $ hg up -q 3
231 $ hg up -q 3
232 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
232 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
233 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
233 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
234 D8434 - created - d549263bcb2d: modified 1
234 D8434 - created - d549263bcb2d: modified 1
235 D8433 - updated - 2b4aa8a88d61: modified 2
235 D8433 - updated - 2b4aa8a88d61: modified 2
236 new commits: ['876a60d024de']
236 new commits: ['876a60d024de']
237 new commits: ['0c6523cb1d0f']
237 new commits: ['0c6523cb1d0f']
238 restabilizing 1eda4bf55021 as d2c78c3a3e01
238 restabilizing 1eda4bf55021 as d2c78c3a3e01
239 $ hg log -G -T compact
239 $ hg log -G -T compact
240 o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
240 o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
241 | modified 3
241 | modified 3
242 |
242 |
243 | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
243 | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
244 |/ modified 2
244 |/ modified 2
245 |
245 |
246 o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
246 o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
247 | modified 1
247 | modified 1
248 |
248 |
249 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
249 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
250 added
250 added
251
251
252 Posting obsolete commits is disallowed
252 Posting obsolete commits is disallowed
253
253
254 $ echo "mod3" > file1.txt
254 $ echo "mod3" > file1.txt
255 $ hg ci -m 'modified A'
255 $ hg ci -m 'modified A'
256 $ echo "mod4" > file1.txt
256 $ echo "mod4" > file1.txt
257 $ hg ci -m 'modified B'
257 $ hg ci -m 'modified B'
258
258
259 $ hg up '.^'
259 $ hg up '.^'
260 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
260 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
261 $ echo 'obsolete' > file1.txt
261 $ echo 'obsolete' > file1.txt
262 $ hg amend --config extensions.amend=
262 $ hg amend --config extensions.amend=
263 1 new orphan changesets
263 1 new orphan changesets
264 $ hg log -G
264 $ hg log -G
265 @ changeset: 10:082be6c94150
265 @ changeset: 10:082be6c94150
266 | tag: tip
266 | tag: tip
267 | parent: 6:0c6523cb1d0f
267 | parent: 6:0c6523cb1d0f
268 | user: test
268 | user: test
269 | date: Thu Jan 01 00:00:00 1970 +0000
269 | date: Thu Jan 01 00:00:00 1970 +0000
270 | summary: modified A
270 | summary: modified A
271 |
271 |
272 | * changeset: 9:a67643f48146
272 | * changeset: 9:a67643f48146
273 | | user: test
273 | | user: test
274 | | date: Thu Jan 01 00:00:00 1970 +0000
274 | | date: Thu Jan 01 00:00:00 1970 +0000
275 | | instability: orphan
275 | | instability: orphan
276 | | summary: modified B
276 | | summary: modified B
277 | |
277 | |
278 | x changeset: 8:db79727cb2f7
278 | x changeset: 8:db79727cb2f7
279 |/ parent: 6:0c6523cb1d0f
279 |/ parent: 6:0c6523cb1d0f
280 | user: test
280 | user: test
281 | date: Thu Jan 01 00:00:00 1970 +0000
281 | date: Thu Jan 01 00:00:00 1970 +0000
282 | obsolete: rewritten using amend as 10:082be6c94150
282 | obsolete: rewritten using amend as 10:082be6c94150
283 | summary: modified A
283 | summary: modified A
284 |
284 |
285 | o changeset: 7:d2c78c3a3e01
285 | o changeset: 7:d2c78c3a3e01
286 | | parent: 5:876a60d024de
286 | | parent: 5:876a60d024de
287 | | user: test
287 | | user: test
288 | | date: Thu Jan 01 00:00:00 1970 +0000
288 | | date: Thu Jan 01 00:00:00 1970 +0000
289 | | summary: modified 3
289 | | summary: modified 3
290 | |
290 | |
291 o | changeset: 6:0c6523cb1d0f
291 o | changeset: 6:0c6523cb1d0f
292 |/ user: test
292 |/ user: test
293 | date: Thu Jan 01 00:00:00 1970 +0000
293 | date: Thu Jan 01 00:00:00 1970 +0000
294 | summary: modified 2
294 | summary: modified 2
295 |
295 |
296 o changeset: 5:876a60d024de
296 o changeset: 5:876a60d024de
297 | parent: 0:5cbade24e0fa
297 | parent: 0:5cbade24e0fa
298 | user: test
298 | user: test
299 | date: Thu Jan 01 00:00:00 1970 +0000
299 | date: Thu Jan 01 00:00:00 1970 +0000
300 | summary: modified 1
300 | summary: modified 1
301 |
301 |
302 o changeset: 0:5cbade24e0fa
302 o changeset: 0:5cbade24e0fa
303 user: test
303 user: test
304 date: Thu Jan 01 00:00:00 1970 +0000
304 date: Thu Jan 01 00:00:00 1970 +0000
305 summary: added
305 summary: added
306
306
307 $ hg phabsend -r 5::
307 $ hg phabsend -r 5::
308 abort: obsolete commits cannot be posted for review
308 abort: obsolete commits cannot be posted for review
309 [255]
309 [255]
310
310
311 Don't restack existing orphans
311 Don't restack existing orphans
312
312
313 $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
313 $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
314 876a60d024de mapped to old nodes ['876a60d024de']
314 876a60d024de mapped to old nodes ['876a60d024de']
315 0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
315 0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
316 D8434 - updated - 876a60d024de: modified 1
316 D8434 - updated - 876a60d024de: modified 1
317 D8433 - updated - 0c6523cb1d0f: modified 2
317 D8433 - updated - 0c6523cb1d0f: modified 2
318 D8435 - created - 082be6c94150: modified A
318 D8435 - created - 082be6c94150: modified A
319 new commits: ['b5913193c805']
319 new commits: ['b5913193c805']
320 not restabilizing unchanged d2c78c3a3e01
320 not restabilizing unchanged d2c78c3a3e01
321 $ hg log -G
321 $ hg log -G
322 @ changeset: 11:b5913193c805
322 @ changeset: 11:b5913193c805
323 | tag: tip
323 | tag: tip
324 | parent: 6:0c6523cb1d0f
324 | parent: 6:0c6523cb1d0f
325 | user: test
325 | user: test
326 | date: Thu Jan 01 00:00:00 1970 +0000
326 | date: Thu Jan 01 00:00:00 1970 +0000
327 | summary: modified A
327 | summary: modified A
328 |
328 |
329 | * changeset: 9:a67643f48146
329 | * changeset: 9:a67643f48146
330 | | user: test
330 | | user: test
331 | | date: Thu Jan 01 00:00:00 1970 +0000
331 | | date: Thu Jan 01 00:00:00 1970 +0000
332 | | instability: orphan
332 | | instability: orphan
333 | | summary: modified B
333 | | summary: modified B
334 | |
334 | |
335 | x changeset: 8:db79727cb2f7
335 | x changeset: 8:db79727cb2f7
336 |/ parent: 6:0c6523cb1d0f
336 |/ parent: 6:0c6523cb1d0f
337 | user: test
337 | user: test
338 | date: Thu Jan 01 00:00:00 1970 +0000
338 | date: Thu Jan 01 00:00:00 1970 +0000
339 | obsolete: rewritten using amend, phabsend as 11:b5913193c805
339 | obsolete: rewritten using amend, phabsend as 11:b5913193c805
340 | summary: modified A
340 | summary: modified A
341 |
341 |
342 | o changeset: 7:d2c78c3a3e01
342 | o changeset: 7:d2c78c3a3e01
343 | | parent: 5:876a60d024de
343 | | parent: 5:876a60d024de
344 | | user: test
344 | | user: test
345 | | date: Thu Jan 01 00:00:00 1970 +0000
345 | | date: Thu Jan 01 00:00:00 1970 +0000
346 | | summary: modified 3
346 | | summary: modified 3
347 | |
347 | |
348 o | changeset: 6:0c6523cb1d0f
348 o | changeset: 6:0c6523cb1d0f
349 |/ user: test
349 |/ user: test
350 | date: Thu Jan 01 00:00:00 1970 +0000
350 | date: Thu Jan 01 00:00:00 1970 +0000
351 | summary: modified 2
351 | summary: modified 2
352 |
352 |
353 o changeset: 5:876a60d024de
353 o changeset: 5:876a60d024de
354 | parent: 0:5cbade24e0fa
354 | parent: 0:5cbade24e0fa
355 | user: test
355 | user: test
356 | date: Thu Jan 01 00:00:00 1970 +0000
356 | date: Thu Jan 01 00:00:00 1970 +0000
357 | summary: modified 1
357 | summary: modified 1
358 |
358 |
359 o changeset: 0:5cbade24e0fa
359 o changeset: 0:5cbade24e0fa
360 user: test
360 user: test
361 date: Thu Jan 01 00:00:00 1970 +0000
361 date: Thu Jan 01 00:00:00 1970 +0000
362 summary: added
362 summary: added
363
363
364 $ cd ..
364 $ cd ..
365
365
366 Phabesending a new binary, a modified binary, and a removed binary
366 Phabesending a new binary, a modified binary, and a removed binary
367
367
368 >>> open('bin', 'wb').write(b'\0a') and None
368 >>> open('bin', 'wb').write(b'\0a') and None
369 $ hg ci -Am 'add binary'
369 $ hg ci -Am 'add binary'
370 adding bin
370 adding bin
371 >>> open('bin', 'wb').write(b'\0b') and None
371 >>> open('bin', 'wb').write(b'\0b') and None
372 $ hg ci -m 'modify binary'
372 $ hg ci -m 'modify binary'
373 $ hg rm bin
373 $ hg rm bin
374 $ hg ci -m 'remove binary'
374 $ hg ci -m 'remove binary'
375 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
375 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
376 uploading bin@aa24a81f55de
376 uploading bin@aa24a81f55de
377 D8007 - created - aa24a81f55de: add binary
377 D8007 - created - aa24a81f55de: add binary
378 uploading bin@d8d62a881b54
378 uploading bin@d8d62a881b54
379 D8008 - created - d8d62a881b54: modify binary
379 D8008 - created - d8d62a881b54: modify binary
380 D8009 - created - af55645b2e29: remove binary
380 D8009 - created - af55645b2e29: remove binary
381 new commits: ['b8139fbb4a57']
381 new commits: ['b8139fbb4a57']
382 new commits: ['c88ce4c2d2ad']
382 new commits: ['c88ce4c2d2ad']
383 new commits: ['75dbbc901145']
383 new commits: ['75dbbc901145']
384 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
384 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
385
385
386 Phabsend a renamed binary and a copied binary, with and without content changes
386 Phabsend a renamed binary and a copied binary, with and without content changes
387 to src and dest
387 to src and dest
388
388
389 >>> open('bin2', 'wb').write(b'\0c') and None
389 >>> open('bin2', 'wb').write(b'\0c') and None
390 $ hg ci -Am 'add another binary'
390 $ hg ci -Am 'add another binary'
391 adding bin2
391 adding bin2
392
392
393 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
393 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
394 looks much different than when viewing "bin2_moved". No idea if this is a phab
394 looks much different than when viewing "bin2_moved". No idea if this is a phab
395 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
395 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
396 though.
396 though.
397
397
398 $ hg mv bin2 bin2_moved
398 $ hg mv bin2 bin2_moved
399 $ hg ci -m "moved binary"
399 $ hg ci -m "moved binary"
400
400
401 Note: "bin2_moved" is also not viewable in phabricator with this review
401 Note: "bin2_moved" is also not viewable in phabricator with this review
402
402
403 $ hg cp bin2_moved bin2_copied
403 $ hg cp bin2_moved bin2_copied
404 $ hg ci -m "copied binary"
404 $ hg ci -m "copied binary"
405
405
406 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
406 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
407 are viewable in their proper state. "bin2_copied" is not viewable, and not
407 are viewable in their proper state. "bin2_copied" is not viewable, and not
408 listed as binary in phabricator.
408 listed as binary in phabricator.
409
409
410 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
410 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
411 $ hg mv bin2_copied bin2_moved_again
411 $ hg mv bin2_copied bin2_moved_again
412 $ hg ci -m "move+mod copied binary"
412 $ hg ci -m "move+mod copied binary"
413
413
414 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
414 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
415 viewable on each side.
415 viewable on each side.
416
416
417 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
417 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
418 $ hg cp bin2_moved bin2_moved_copied
418 $ hg cp bin2_moved bin2_moved_copied
419 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
419 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
420 $ hg ci -m "copy+mod moved binary"
420 $ hg ci -m "copy+mod moved binary"
421
421
422 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
422 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
423 uploading bin2@f42f9195e00c
423 uploading bin2@f42f9195e00c
424 D8128 - created - f42f9195e00c: add another binary
424 D8128 - created - f42f9195e00c: add another binary
425 D8129 - created - 834ab31d80ae: moved binary
425 D8129 - created - 834ab31d80ae: moved binary
426 D8130 - created - 494b750e5194: copied binary
426 D8130 - created - 494b750e5194: copied binary
427 uploading bin2_moved_again@25f766b50cc2
427 uploading bin2_moved_again@25f766b50cc2
428 D8131 - created - 25f766b50cc2: move+mod copied binary
428 D8131 - created - 25f766b50cc2: move+mod copied binary
429 uploading bin2_moved_copied@1b87b363a5e4
429 uploading bin2_moved_copied@1b87b363a5e4
430 uploading bin2_moved@1b87b363a5e4
430 uploading bin2_moved@1b87b363a5e4
431 D8132 - created - 1b87b363a5e4: copy+mod moved binary
431 D8132 - created - 1b87b363a5e4: copy+mod moved binary
432 new commits: ['90437c20312a']
432 new commits: ['90437c20312a']
433 new commits: ['f391f4da4c61']
433 new commits: ['f391f4da4c61']
434 new commits: ['da86a9f3268c']
434 new commits: ['da86a9f3268c']
435 new commits: ['003ffc16ba66']
435 new commits: ['003ffc16ba66']
436 new commits: ['13bd750c36fa']
436 new commits: ['13bd750c36fa']
437 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
437 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
438
438
439 Phabreading a DREV with a local:commits time as a string:
439 Phabreading a DREV with a local:commits time as a string:
440 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
440 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
441 # HG changeset patch
441 # HG changeset patch
442 # User Pulkit Goyal <7895pulkit@gmail.com>
442 # User Pulkit Goyal <7895pulkit@gmail.com>
443 # Date 1509404054 -19800
443 # Date 1509404054 -19800
444 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
444 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
445 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
445 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
446 repoview: add a new attribute _visibilityexceptions and related API
446 repoview: add a new attribute _visibilityexceptions and related API
447
447
448 Currently we don't have a defined way in core to make some hidden revisions
448 Currently we don't have a defined way in core to make some hidden revisions
449 visible in filtered repo. Extensions to achieve the purpose of unhiding some
449 visible in filtered repo. Extensions to achieve the purpose of unhiding some
450 hidden commits, wrap repoview.pinnedrevs() function.
450 hidden commits, wrap repoview.pinnedrevs() function.
451
451
452 To make the above task simple and have well defined API, this patch adds a new
452 To make the above task simple and have well defined API, this patch adds a new
453 attribute '_visibilityexceptions' to repoview class which will contains
453 attribute '_visibilityexceptions' to repoview class which will contains
454 the hidden revs which should be exception.
454 the hidden revs which should be exception.
455 This will allow to set different exceptions for different repoview objects
455 This will allow to set different exceptions for different repoview objects
456 backed by the same unfiltered repo.
456 backed by the same unfiltered repo.
457
457
458 This patch also adds API to add revs to the attribute set and get them.
458 This patch also adds API to add revs to the attribute set and get them.
459
459
460 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
460 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
461
461
462 Differential Revision: https://phab.mercurial-scm.org/D1285
462 Differential Revision: https://phab.mercurial-scm.org/D1285
463 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
463 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
464 --- a/mercurial/repoview.py
464 --- a/mercurial/repoview.py
465 +++ b/mercurial/repoview.py
465 +++ b/mercurial/repoview.py
466 @@ * @@ (glob)
466 @@ * @@ (glob)
467 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
467 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
468 """
468 """
469
469
470 + # hidden revs which should be visible
470 + # hidden revs which should be visible
471 + _visibilityexceptions = set()
471 + _visibilityexceptions = set()
472 +
472 +
473 def __init__(self, repo, filtername):
473 def __init__(self, repo, filtername):
474 object.__setattr__(self, r'_unfilteredrepo', repo)
474 object.__setattr__(self, r'_unfilteredrepo', repo)
475 object.__setattr__(self, r'filtername', filtername)
475 object.__setattr__(self, r'filtername', filtername)
476 @@ -231,6 +234,14 @@
476 @@ -231,6 +234,14 @@
477 return self
477 return self
478 return self.unfiltered().filtered(name)
478 return self.unfiltered().filtered(name)
479
479
480 + def addvisibilityexceptions(self, revs):
480 + def addvisibilityexceptions(self, revs):
481 + """adds hidden revs which should be visible to set of exceptions"""
481 + """adds hidden revs which should be visible to set of exceptions"""
482 + self._visibilityexceptions.update(revs)
482 + self._visibilityexceptions.update(revs)
483 +
483 +
484 + def getvisibilityexceptions(self):
484 + def getvisibilityexceptions(self):
485 + """returns the set of hidden revs which should be visible"""
485 + """returns the set of hidden revs which should be visible"""
486 + return self._visibilityexceptions
486 + return self._visibilityexceptions
487 +
487 +
488 # everything access are forwarded to the proxied repo
488 # everything access are forwarded to the proxied repo
489 def __getattr__(self, attr):
489 def __getattr__(self, attr):
490 return getattr(self._unfilteredrepo, attr)
490 return getattr(self._unfilteredrepo, attr)
491 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
491 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
492 --- a/mercurial/localrepo.py
492 --- a/mercurial/localrepo.py
493 +++ b/mercurial/localrepo.py
493 +++ b/mercurial/localrepo.py
494 @@ -570,6 +570,14 @@
494 @@ -570,6 +570,14 @@
495 def close(self):
495 def close(self):
496 self._writecaches()
496 self._writecaches()
497
497
498 + def addvisibilityexceptions(self, exceptions):
498 + def addvisibilityexceptions(self, exceptions):
499 + # should be called on a filtered repository
499 + # should be called on a filtered repository
500 + pass
500 + pass
501 +
501 +
502 + def getvisibilityexceptions(self):
502 + def getvisibilityexceptions(self):
503 + # should be called on a filtered repository
503 + # should be called on a filtered repository
504 + return set()
504 + return set()
505 +
505 +
506 def _loadextensions(self):
506 def _loadextensions(self):
507 extensions.loadall(self.ui)
507 extensions.loadall(self.ui)
508
508
509
509
510 A bad .arcconfig doesn't error out
510 A bad .arcconfig doesn't error out
511 $ echo 'garbage' > .arcconfig
511 $ echo 'garbage' > .arcconfig
512 $ hg config phabricator --debug
512 $ hg config phabricator --debug
513 invalid JSON in $TESTTMP/repo/.arcconfig
513 invalid JSON in $TESTTMP/repo/.arcconfig
514 read config from: */.hgrc (glob)
514 read config from: */.hgrc (glob)
515 */.hgrc:*: phabricator.debug=True (glob)
515 */.hgrc:*: phabricator.debug=True (glob)
516 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
516 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
517 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
517 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
518
518
519 The .arcconfig content overrides global config
519 The .arcconfig content overrides global config
520 $ cat >> $HGRCPATH << EOF
520 $ cat >> $HGRCPATH << EOF
521 > [phabricator]
521 > [phabricator]
522 > url = global
522 > url = global
523 > callsign = global
523 > callsign = global
524 > EOF
524 > EOF
525 $ cp $TESTDIR/../.arcconfig .
525 $ cp $TESTDIR/../.arcconfig .
526 $ mv .hg/hgrc .hg/hgrc.bak
526 $ mv .hg/hgrc .hg/hgrc.bak
527 $ hg config phabricator --debug
527 $ hg config phabricator --debug
528 read config from: */.hgrc (glob)
528 read config from: */.hgrc (glob)
529 */.hgrc:*: phabricator.debug=True (glob)
529 */.hgrc:*: phabricator.debug=True (glob)
530 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
530 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
531 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
531 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
532
532
533 But it doesn't override local config
533 But it doesn't override local config
534 $ cat >> .hg/hgrc << EOF
534 $ cat >> .hg/hgrc << EOF
535 > [phabricator]
535 > [phabricator]
536 > url = local
536 > url = local
537 > callsign = local
537 > callsign = local
538 > EOF
538 > EOF
539 $ hg config phabricator --debug
539 $ hg config phabricator --debug
540 read config from: */.hgrc (glob)
540 read config from: */.hgrc (glob)
541 */.hgrc:*: phabricator.debug=True (glob)
541 */.hgrc:*: phabricator.debug=True (glob)
542 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
542 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
543 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
543 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
544 $ mv .hg/hgrc.bak .hg/hgrc
544 $ mv .hg/hgrc.bak .hg/hgrc
545
545
546 Phabimport works with a stack
546 Phabimport works with a stack
547
547
548 $ cd ..
548 $ cd ..
549 $ hg clone repo repo2 -qr 1
549 $ hg clone repo repo2 -qr 1
550 $ cp repo/.hg/hgrc repo2/.hg/
550 $ cp repo/.hg/hgrc repo2/.hg/
551 $ cd repo2
551 $ cd repo2
552 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
552 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
553 applying patch from D7917
553 applying patch from D7917
554 applying patch from D7918
554 applying patch from D7918
555 $ hg log -r .: -G -Tcompact
555 $ hg log -r .: -G -Tcompact
556 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
556 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
557 | create draft change for phabricator testing
557 | create draft change for phabricator testing
558 |
558 |
559 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
559 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
560 | create public change for phabricator testing
560 | create public change for phabricator testing
561 |
561 |
562 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
562 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
563 | create beta for phabricator test
563 | create beta for phabricator test
564 ~
564 ~
565 Phabimport can create secret commits
565 Phabimport can create secret commits
566
566
567 $ hg rollback --config ui.rollback=True
567 $ hg rollback --config ui.rollback=True
568 repository tip rolled back to revision 1 (undo phabimport)
568 repository tip rolled back to revision 1 (undo phabimport)
569 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
569 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
570 > --config phabimport.secret=True
570 > --config phabimport.secret=True
571 applying patch from D7917
571 applying patch from D7917
572 applying patch from D7918
572 applying patch from D7918
573 $ hg log -r 'reverse(.:)' -T phases
573 $ hg log -r 'reverse(.:)' -T phases
574 changeset: 3:aaef04066140
574 changeset: 3:aaef04066140
575 tag: tip
575 tag: tip
576 phase: secret
576 phase: secret
577 user: test
577 user: test
578 date: Thu Jan 01 00:00:00 1970 +0000
578 date: Thu Jan 01 00:00:00 1970 +0000
579 summary: create draft change for phabricator testing
579 summary: create draft change for phabricator testing
580
580
581 changeset: 2:8de3712202d1
581 changeset: 2:8de3712202d1
582 phase: secret
582 phase: secret
583 user: test
583 user: test
584 date: Thu Jan 01 00:00:00 1970 +0000
584 date: Thu Jan 01 00:00:00 1970 +0000
585 summary: create public change for phabricator testing
585 summary: create public change for phabricator testing
586
586
587 changeset: 1:a692622e6937
587 changeset: 1:a692622e6937
588 phase: public
588 phase: public
589 user: test
589 user: test
590 date: Thu Jan 01 00:00:00 1970 +0000
590 date: Thu Jan 01 00:00:00 1970 +0000
591 summary: create beta for phabricator test
591 summary: create beta for phabricator test
592
592
593 Phabimport accepts multiple DREVSPECs
593 Phabimport accepts multiple DREVSPECs
594
594
595 $ hg rollback --config ui.rollback=True
595 $ hg rollback --config ui.rollback=True
596 repository tip rolled back to revision 1 (undo phabimport)
596 repository tip rolled back to revision 1 (undo phabimport)
597 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
597 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
598 applying patch from D7917
598 applying patch from D7917
599 applying patch from D7918
599 applying patch from D7918
600
600
601 Phabsend requires a linear range of commits
601 Phabsend requires a linear range of commits
602
602
603 $ hg phabsend -r 0+2+3
603 $ hg phabsend -r 0+2+3
604 abort: cannot phabsend multiple head revisions: c44b38f24a45 aaef04066140
604 abort: cannot phabsend multiple head revisions: c44b38f24a45 aaef04066140
605 (the revisions must form a linear chain)
605 (the revisions must form a linear chain)
606 [255]
606 [255]
607
607
608 Validate arguments with --fold
608 Validate arguments with --fold
609
609
610 $ hg phabsend --fold -r 1
610 $ hg phabsend --fold -r 1
611 abort: cannot fold a single revision
611 abort: cannot fold a single revision
612 [255]
612 [255]
613 $ hg phabsend --fold --no-amend -r 1::
613 $ hg phabsend --fold --no-amend -r 1::
614 abort: cannot fold with --no-amend
614 abort: cannot fold with --no-amend
615 [255]
615 [255]
616 $ hg phabsend --fold -r 1::
616 $ hg phabsend --fold -r 1::
617 abort: cannot fold revisions with different DREV values
617 abort: cannot fold revisions with different DREV values
618 [255]
618 [255]
619
619
620 Setup a series of commits to be folded, and include the Test Plan field multiple
620 Setup a series of commits to be folded, and include the Test Plan field multiple
621 times to test the concatenation logic. No Test Plan field in the last one to
621 times to test the concatenation logic. No Test Plan field in the last one to
622 ensure missing fields are skipped.
622 ensure missing fields are skipped.
623
623
624 $ hg init ../folded
624 $ hg init ../folded
625 $ cd ../folded
625 $ cd ../folded
626 $ cat >> .hg/hgrc <<EOF
626 $ cat >> .hg/hgrc <<EOF
627 > [phabricator]
627 > [phabricator]
628 > url = https://phab.mercurial-scm.org/
628 > url = https://phab.mercurial-scm.org/
629 > callsign = HG
629 > callsign = HG
630 > EOF
630 > EOF
631
631
632 $ echo 'added' > file.txt
632 $ echo 'added' > file.txt
633 $ hg ci -Aqm 'added file'
633 $ hg ci -Aqm 'added file'
634
634
635 $ cat > log.txt <<EOF
635 $ cat > log.txt <<EOF
636 > one: first commit to review
636 > one: first commit to review
637 >
638 > This file was modified with 'mod1' as its contents.
639 >
640 > Test Plan:
641 > LOL! What testing?!
642 > EOF
643 $ echo mod1 > file.txt
644 $ hg ci -l log.txt
645
646 $ cat > log.txt <<EOF
647 > two: second commit to review
648 >
649 > This file was modified with 'mod2' as its contents.
650 >
651 > Test Plan:
652 > Haha! yeah, right.
653 >
654 > EOF
655 $ echo mod2 > file.txt
656 $ hg ci -l log.txt
657
658 $ echo mod3 > file.txt
659 $ hg ci -m '3: a commit with no detailed message'
660
661 The folding of immutable commits works...
662
663 $ hg phase -r tip --public
664 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
665 D8386 - created - a959a3f69d8d: one: first commit to review
666 D8386 - created - 24a4438154ba: two: second commit to review
667 D8386 - created - d235829e802c: 3: a commit with no detailed message
668 warning: not updating public commit 1:a959a3f69d8d
669 warning: not updating public commit 2:24a4438154ba
670 warning: not updating public commit 3:d235829e802c
671 no newnodes to update
672
673 $ hg phase -r 0 --draft --force
674
675 ... as does the initial mutable fold...
676
677 $ echo y | hg phabsend --fold --confirm -r 1:: \
678 > --test-vcr "$VCR/phabsend-fold-initial.json"
679 NEW - a959a3f69d8d: one: first commit to review
680 NEW - 24a4438154ba: two: second commit to review
681 NEW - d235829e802c: 3: a commit with no detailed message
682 Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
683 D8387 - created - a959a3f69d8d: one: first commit to review
684 D8387 - created - 24a4438154ba: two: second commit to review
685 D8387 - created - d235829e802c: 3: a commit with no detailed message
686 updating local commit list for D8387
687 new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
688 saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg
689
690 ... and doesn't mangle the local commits.
691
692 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
693 3:921f8265efbd
694 3: a commit with no detailed message
695
696 Differential Revision: https://phab.mercurial-scm.org/D8387
697 2:832553266fe8
698 two: second commit to review
699
700 This file was modified with 'mod2' as its contents.
701
702 Test Plan:
703 Haha! yeah, right.
704
705 Differential Revision: https://phab.mercurial-scm.org/D8387
706 1:602c4e738243
707 one: first commit to review
708
709 This file was modified with 'mod1' as its contents.
710
711 Test Plan:
712 LOL! What testing?!
713
714 Differential Revision: https://phab.mercurial-scm.org/D8387
715 0:98d480e0d494
716 added file
717
718 Setup some obsmarkers by adding a file to the middle commit. This stress tests
719 getoldnodedrevmap() in later phabsends.
720
721 $ hg up '.^'
722 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
723 $ echo 'modified' > file2.txt
724 $ hg add file2.txt
725 $ hg amend --config experimental.evolution=all --config extensions.amend=
726 1 new orphan changesets
727 $ hg up 3
728 obsolete feature not enabled but 1 markers found!
729 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
730 $ hg rebase --config experimental.evolution=all --config extensions.rebase=
731 note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
732 rebasing 3:921f8265efbd "3: a commit with no detailed message"
733
734 When commits have changed locally, the local commit list on Phabricator is
735 updated.
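
(Editorial note, not part of the test file.) The "local commit list" here is metadata that phabsend attaches to the Phabricator diff so the review records which local changesets currently back it. As a rough illustration of the mechanism, and assuming the standard Conduit method differential.setdiffproperty with a local:commits property name, a minimal standalone sketch might look like this; the URL, token, diff id, and payload fields are placeholders, not values taken from this test:

  import json
  import urllib.parse
  import urllib.request

  # Placeholder payload: maps each local node to a small description of it.
  commits = {
      "0124e5474c88": {"commit": "0124e5474c88", "parents": ["602c4e738243"]},
  }
  params = {
      "api.token": "api-XXXX",            # placeholder Conduit token
      "diff_id": "22437",                 # placeholder diff id
      "name": "local:commits",            # property kept in sync with local history
      "data": json.dumps(commits),
  }
  request = urllib.request.Request(
      "https://phab.example.com/api/differential.setdiffproperty",
      data=urllib.parse.urlencode(params).encode("ascii"),
  )
  with urllib.request.urlopen(request) as response:
      print(json.load(response).get("error_info"))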
736
737 $ echo y | hg phabsend --fold --confirm -r 1:: \
738 > --test-vcr "$VCR/phabsend-fold-updated.json"
739 obsolete feature not enabled but 2 markers found!
740 602c4e738243 mapped to old nodes ['602c4e738243']
741 0124e5474c88 mapped to old nodes ['832553266fe8']
742 e4edb1fe3565 mapped to old nodes ['921f8265efbd']
743 D8387 - 602c4e738243: one: first commit to review
744 D8387 - 0124e5474c88: two: second commit to review
745 D8387 - e4edb1fe3565: 3: a commit with no detailed message
746 Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
747 D8387 - updated - 602c4e738243: one: first commit to review
748 D8387 - updated - 0124e5474c88: two: second commit to review
749 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
750 obsolete feature not enabled but 2 markers found! (?)
751 updating local commit list for D8387
752 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
753 $ hg log -Tcompact
754 obsolete feature not enabled but 2 markers found!
755 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
756 3: a commit with no detailed message
757
758 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
759 two: second commit to review
760
761 1 602c4e738243 1970-01-01 00:00 +0000 test
762 one: first commit to review
763
764 0 98d480e0d494 1970-01-01 00:00 +0000 test
765 added file
766
767 When nothing has changed locally since the last phabsend, the commit list isn't
768 updated, and nothing is changed locally afterward.
769
770 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
771 obsolete feature not enabled but 2 markers found!
772 602c4e738243 mapped to old nodes ['602c4e738243']
773 0124e5474c88 mapped to old nodes ['0124e5474c88']
774 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
775 D8387 - updated - 602c4e738243: one: first commit to review
776 D8387 - updated - 0124e5474c88: two: second commit to review
777 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
778 obsolete feature not enabled but 2 markers found! (?)
779 local commit list for D8387 is already up-to-date
780 $ hg log -Tcompact
781 obsolete feature not enabled but 2 markers found!
782 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
783 3: a commit with no detailed message
784
785 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
786 two: second commit to review
787
788 1 602c4e738243 1970-01-01 00:00 +0000 test
789 one: first commit to review
790
791 0 98d480e0d494 1970-01-01 00:00 +0000 test
792 added file
793
794 Fold will accept new revisions at the end...
795
796 $ echo 'another mod' > file2.txt
797 $ hg ci -m 'four: extend the fold range'
798 obsolete feature not enabled but 2 markers found!
799 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
800 > --config experimental.evolution=all
801 602c4e738243 mapped to old nodes ['602c4e738243']
802 0124e5474c88 mapped to old nodes ['0124e5474c88']
803 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
804 D8387 - updated - 602c4e738243: one: first commit to review
805 D8387 - updated - 0124e5474c88: two: second commit to review
806 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
807 D8387 - created - 94aaae213b23: four: extend the fold range
808 updating local commit list for D8387
809 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
810 $ hg log -r . -T '{desc}\n'
811 four: extend the fold range
812
813 Differential Revision: https://phab.mercurial-scm.org/D8387
814 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
815 obsolete feature not enabled but 3 markers found!
816 1 https://phab.mercurial-scm.org/D8387 D8387
817 4 https://phab.mercurial-scm.org/D8387 D8387
818 5 https://phab.mercurial-scm.org/D8387 D8387
819 7 https://phab.mercurial-scm.org/D8387 D8387
820
821 ... and also accepts new revisions at the beginning of the range
822
823 It's a bit unfortunate that not having a Differential URL on the first commit
824 causes a new Differential Revision to be created, though it isn't *entirely*
825 unreasonable. At least this updates the subsequent commits.
826
827 TODO: See if it can reuse the existing Differential.
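
(Editorial note, not part of the test file.) One way the TODO above could be approached is to look up the Differential Revision that the later commits in the range already reference before deciding to create a new one. Assuming the standard Conduit method differential.revision.search, a minimal query by D-number might look like the following sketch; the URL and token are placeholders:

  import json
  import urllib.parse
  import urllib.request

  params = {
      "api.token": "api-XXXX",           # placeholder Conduit token
      "constraints[ids][0]": "8387",     # the D-number the descendants already carry
  }
  request = urllib.request.Request(
      "https://phab.example.com/api/differential.revision.search",
      data=urllib.parse.urlencode(params).encode("ascii"),
  )
  with urllib.request.urlopen(request) as response:
      data = json.load(response)["result"]["data"]
  print(data[0]["phid"] if data else "no matching revision")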
828
829 $ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
830 > --config experimental.evolution=all
831 602c4e738243 mapped to old nodes ['602c4e738243']
832 0124e5474c88 mapped to old nodes ['0124e5474c88']
833 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
834 51a04fea8707 mapped to old nodes ['51a04fea8707']
835 D8388 - created - 98d480e0d494: added file
836 D8388 - updated - 602c4e738243: one: first commit to review
837 D8388 - updated - 0124e5474c88: two: second commit to review
838 D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
839 D8388 - updated - 51a04fea8707: four: extend the fold range
840 updating local commit list for D8388
841 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']
842
843 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
844 obsolete feature not enabled but 8 markers found!
845 12:ac7db67f0991
846 four: extend the fold range
847
848 Differential Revision: https://phab.mercurial-scm.org/D8388
849 11:30682b960804
850 3: a commit with no detailed message
851
852 Differential Revision: https://phab.mercurial-scm.org/D8388
853 10:3ee132d41dbc
854 two: second commit to review
855
856 This file was modified with 'mod2' as its contents.
857
858 Test Plan:
859 Haha! yeah, right.
860
861 Differential Revision: https://phab.mercurial-scm.org/D8388
862 9:6320b7d714cf
863 one: first commit to review
864
865 This file was modified with 'mod1' as its contents.
866
867 Test Plan:
868 LOL! What testing?!
869
870 Differential Revision: https://phab.mercurial-scm.org/D8388
871 8:15e9b14b4b4c
872 added file
873
874 Differential Revision: https://phab.mercurial-scm.org/D8388
875
876 Test phabsend --fold with an `hg split` at the end of the range
877
878 $ echo foo > file3.txt
879 $ hg add file3.txt
880
881 $ hg log -r . -T '{desc}' > log.txt
882 $ echo 'amended mod' > file2.txt
883 $ hg ci --amend -l log.txt --config experimental.evolution=all
884
885 $ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
886 > --config experimental.evolution=all split -r .
887 > n
888 > y
889 > y
890 > y
891 > y
892 > EOF
893 diff --git a/file2.txt b/file2.txt
894 1 hunks, 1 lines changed
895 examine changes to 'file2.txt'?
896 (enter ? for help) [Ynesfdaq?] n
897
898 diff --git a/file3.txt b/file3.txt
899 new file mode 100644
900 examine changes to 'file3.txt'?
901 (enter ? for help) [Ynesfdaq?] y
902
903 @@ -0,0 +1,1 @@
904 +foo
905 record change 2/2 to 'file3.txt'?
906 (enter ? for help) [Ynesfdaq?] y
907
908 created new head
909 diff --git a/file2.txt b/file2.txt
910 1 hunks, 1 lines changed
911 examine changes to 'file2.txt'?
912 (enter ? for help) [Ynesfdaq?] y
913
914 @@ -1,1 +1,1 @@
915 -modified
916 +amended mod
917 record this change to 'file2.txt'?
918 (enter ? for help) [Ynesfdaq?] y
919
920 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
921 > --config experimental.evolution=all
922 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
923 6320b7d714cf mapped to old nodes ['6320b7d714cf']
924 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
925 30682b960804 mapped to old nodes ['30682b960804']
926 6bc15dc99efd mapped to old nodes ['ac7db67f0991']
927 b50946d5e490 mapped to old nodes ['ac7db67f0991']
928 D8388 - updated - 15e9b14b4b4c: added file
929 D8388 - updated - 6320b7d714cf: one: first commit to review
930 D8388 - updated - 3ee132d41dbc: two: second commit to review
931 D8388 - updated - 30682b960804: 3: a commit with no detailed message
932 D8388 - updated - 6bc15dc99efd: four: extend the fold range
933 D8388 - updated - b50946d5e490: four: extend the fold range
934 updating local commit list for D8388
935 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']
936
937 Test phabsend --fold with an `hg fold` at the end of the range
938
939 $ hg --config experimental.evolution=all --config extensions.rebase= \
940 > rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
941 rebasing 14:6bc15dc99efd "four: extend the fold range"
942 rebasing 15:b50946d5e490 "four: extend the fold range" (tip)
943
944 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
945 > --config experimental.evolution=all
946 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
947 6320b7d714cf mapped to old nodes ['6320b7d714cf']
948 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
949 30682b960804 mapped to old nodes ['30682b960804']
950 e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
951 D8388 - updated - 15e9b14b4b4c: added file
952 D8388 - updated - 6320b7d714cf: one: first commit to review
953 D8388 - updated - 3ee132d41dbc: two: second commit to review
954 D8388 - updated - 30682b960804: 3: a commit with no detailed message
955 D8388 - updated - e919cdf3d4fe: four: extend the fold range
956 updating local commit list for D8388
957 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']
958
959 $ hg log -r tip -v
960 obsolete feature not enabled but 12 markers found!
961 changeset: 16:e919cdf3d4fe
962 tag: tip
963 parent: 11:30682b960804
964 user: test
965 date: Thu Jan 01 00:00:00 1970 +0000
966 files: file2.txt file3.txt
967 description:
968 four: extend the fold range
969
970 Differential Revision: https://phab.mercurial-scm.org/D8388
971
972
973 Hashes in the messages are updated automatically as phabsend amends and restacks
974 them. This covers both commits that are posted and descendants that are
975 restacked.
976
977 $ cat >> .hg/hgrc << EOF
978 > [experimental]
979 > evolution = all
980 > EOF
981
982 $ echo content > file.txt
983 $ hg ci -m 'base review (generate test for phabsend)'
984 $ echo 'more content' > file.txt
985 $ hg ci -m '133c1c6c6449 is my parent (generate test for phabsend)'
986 $ echo 'even more content' > file.txt
987 $ hg ci -m 'c2874a398f7e is my parent (generate test for phabsend)'
988
989 $ hg phabsend -r 17::18 --test-vcr "$VCR/phabsend-hash-fixes.json"
990 D8945 - created - 133c1c6c6449: base review (generate test for phabsend)
991 D8946 - created - c2874a398f7e: 133c1c6c6449 is my parent (generate test for phabsend)
992 new commits: ['f444f060f4d6']
993 new commits: ['9c9290f945b1']
994 restabilizing 1528c12fa2e4 as b28b20212bd4
995
996 $ hg log -l 3 -Tcompact
997 22[tip] b28b20212bd4 1970-01-01 00:00 +0000 test
998 9c9290f945b1 is my parent (generate test for phabsend)
999
1000 21 9c9290f945b1 1970-01-01 00:00 +0000 test
1001 f444f060f4d6 is my parent (generate test for phabsend)
1002
1003 20:16 f444f060f4d6 1970-01-01 00:00 +0000 test
1004 base review (generate test for phabsend)
1005
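(Editorial note, not part of the test file.) The output above shows the essence of the feature: once a reviewed commit is rewritten, any predecessor hash quoted in a descendant's message is swapped for the successor's hash ("133c1c6c6449 is my parent" becomes "f444f060f4d6 is my parent", and the restacked child is rewritten again to reference 9c9290f945b1). A minimal, standalone sketch of that substitution step, with the mapping taken from the run above rather than from the extension's actual code:

  # Old short hash -> new short hash, as reported by the phabsend run above.
  replacements = {
      "133c1c6c6449": "f444f060f4d6",
      "c2874a398f7e": "9c9290f945b1",
  }

  def rewrite_description(description: str) -> str:
      """Replace every known predecessor hash with its successor."""
      for old, new in replacements.items():
          description = description.replace(old, new)
      return description

  print(rewrite_description("c2874a398f7e is my parent (generate test for phabsend)"))
  # -> 9c9290f945b1 is my parent (generate test for phabsend)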
1006 $ cd ..