##// END OF EJS Templates
phabricator: restack any new orphans created by phabsend (issue6045)...
Matt Harbison -
r45214:601ce539 default
parent child Browse files
Show More
This diff has been collapsed as it changes many lines, (1229 lines changed) Show them Hide them
@@ -0,0 +1,1229 b''
1 {
2 "version": 1,
3 "interactions": [
4 {
5 "response": {
6 "headers": {
7 "referrer-policy": [
8 "no-referrer"
9 ],
10 "x-frame-options": [
11 "Deny"
12 ],
13 "date": [
14 "Wed, 15 Apr 2020 23:43:54 GMT"
15 ],
16 "server": [
17 "Apache/2.4.10 (Debian)"
18 ],
19 "content-type": [
20 "application/json"
21 ],
22 "expires": [
23 "Sat, 01 Jan 2000 00:00:00 GMT"
24 ],
25 "transfer-encoding": [
26 "chunked"
27 ],
28 "x-xss-protection": [
29 "1; mode=block"
30 ],
31 "strict-transport-security": [
32 "max-age=0; includeSubdomains; preload"
33 ],
34 "cache-control": [
35 "no-store"
36 ],
37 "x-content-type-options": [
38 "nosniff"
39 ]
40 },
41 "status": {
42 "code": 200,
43 "message": "OK"
44 },
45 "body": {
46 "string": "{\"result\":{\"21111\":{\"id\":\"21111\",\"revisionID\":\"8434\",\"dateCreated\":\"1586994225\",\"dateModified\":\"1586994227\",\"sourceControlBaseRevision\":\"5cbade24e0fae40d67c568e86a978a2a946b9aed\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"57079\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"xfwwSr8O83OT\"},\"oldPath\":\"file1.txt\",\"currentPath\":\"file1.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-add\\n+mod1\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 
0\",\"node\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"parent\":\"5cbade24e0fae40d67c568e86a978a2a946b9aed\",\"user\":\"test\"},\"local:commits\":{\"876a60d024de493e35a1c6f963f2604056cdc0b9\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"parents\":[\"5cbade24e0fae40d67c568e86a978a2a946b9aed\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"21110\":{\"id\":\"21110\",\"revisionID\":\"8433\",\"dateCreated\":\"1586994219\",\"dateModified\":\"1586994221\",\"sourceControlBaseRevision\":\"d549263bcb2db54042adf048047b368f1ed246df\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"57078\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"ftEQkHimiyJo\"},\"oldPath\":\"file1.txt\",\"currentPath\":\"file1.txt\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-mod1\\n+mod2\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\",\"parent\":\"876a60d024de493e35a1c6f963f2604056cdc0b9\",\"user\":\"test\"},\"local:commits\":{\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"0c6523cb1d0f560a958bcc0f4f938c91cb1141dc\",\"parents\":[\"876a60d024de493e35a1c6f963f2604056cdc0b9\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
47 }
48 },
49 "request": {
50 "headers": {
51 "content-length": [
52 "162"
53 ],
54 "content-type": [
55 "application/x-www-form-urlencoded"
56 ],
57 "user-agent": [
58 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
59 ],
60 "accept": [
61 "application/mercurial-0.1"
62 ],
63 "host": [
64 "phab.mercurial-scm.org"
65 ]
66 },
67 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22revisionIDs%22%3A+%5B8434%2C+8433%5D%7D",
68 "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
69 "method": "POST"
70 }
71 },
72 {
73 "response": {
74 "headers": {
75 "referrer-policy": [
76 "no-referrer"
77 ],
78 "x-frame-options": [
79 "Deny"
80 ],
81 "date": [
82 "Wed, 15 Apr 2020 23:43:55 GMT"
83 ],
84 "server": [
85 "Apache/2.4.10 (Debian)"
86 ],
87 "content-type": [
88 "application/json"
89 ],
90 "expires": [
91 "Sat, 01 Jan 2000 00:00:00 GMT"
92 ],
93 "transfer-encoding": [
94 "chunked"
95 ],
96 "x-xss-protection": [
97 "1; mode=block"
98 ],
99 "strict-transport-security": [
100 "max-age=0; includeSubdomains; preload"
101 ],
102 "cache-control": [
103 "no-store"
104 ],
105 "x-content-type-options": [
106 "nosniff"
107 ]
108 },
109 "status": {
110 "code": 200,
111 "message": "OK"
112 },
113 "body": {
114 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
115 }
116 },
117 "request": {
118 "headers": {
119 "content-length": [
120 "488"
121 ],
122 "content-type": [
123 "application/x-www-form-urlencoded"
124 ],
125 "user-agent": [
126 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
127 ],
128 "accept": [
129 "application/mercurial-0.1"
130 ],
131 "host": [
132 "phab.mercurial-scm.org"
133 ]
134 },
135 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parent%5C%22%3A+%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221111%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
136 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
137 "method": "POST"
138 }
139 },
140 {
141 "response": {
142 "headers": {
143 "referrer-policy": [
144 "no-referrer"
145 ],
146 "x-frame-options": [
147 "Deny"
148 ],
149 "date": [
150 "Wed, 15 Apr 2020 23:43:55 GMT"
151 ],
152 "server": [
153 "Apache/2.4.10 (Debian)"
154 ],
155 "content-type": [
156 "application/json"
157 ],
158 "expires": [
159 "Sat, 01 Jan 2000 00:00:00 GMT"
160 ],
161 "transfer-encoding": [
162 "chunked"
163 ],
164 "x-xss-protection": [
165 "1; mode=block"
166 ],
167 "strict-transport-security": [
168 "max-age=0; includeSubdomains; preload"
169 ],
170 "cache-control": [
171 "no-store"
172 ],
173 "x-content-type-options": [
174 "nosniff"
175 ]
176 },
177 "status": {
178 "code": 200,
179 "message": "OK"
180 },
181 "body": {
182 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
183 }
184 },
185 "request": {
186 "headers": {
187 "content-length": [
188 "600"
189 ],
190 "content-type": [
191 "application/x-www-form-urlencoded"
192 ],
193 "user-agent": [
194 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
195 ],
196 "accept": [
197 "application/mercurial-0.1"
198 ],
199 "host": [
200 "phab.mercurial-scm.org"
201 ]
202 },
203 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%225cbade24e0fae40d67c568e86a978a2a946b9aed%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221111%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
204 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
205 "method": "POST"
206 }
207 },
208 {
209 "response": {
210 "headers": {
211 "referrer-policy": [
212 "no-referrer"
213 ],
214 "x-frame-options": [
215 "Deny"
216 ],
217 "date": [
218 "Wed, 15 Apr 2020 23:43:56 GMT"
219 ],
220 "server": [
221 "Apache/2.4.10 (Debian)"
222 ],
223 "content-type": [
224 "application/json"
225 ],
226 "expires": [
227 "Sat, 01 Jan 2000 00:00:00 GMT"
228 ],
229 "transfer-encoding": [
230 "chunked"
231 ],
232 "x-xss-protection": [
233 "1; mode=block"
234 ],
235 "strict-transport-security": [
236 "max-age=0; includeSubdomains; preload"
237 ],
238 "cache-control": [
239 "no-store"
240 ],
241 "x-content-type-options": [
242 "nosniff"
243 ]
244 },
245 "status": {
246 "code": 200,
247 "message": "OK"
248 },
249 "body": {
250 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 1\",\"revisionID\":8434},\"revisionIDFieldInfo\":{\"value\":8434,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 1\"}]},\"error_code\":null,\"error_info\":null}"
251 }
252 },
253 "request": {
254 "headers": {
255 "content-length": [
256 "232"
257 ],
258 "content-type": [
259 "application/x-www-form-urlencoded"
260 ],
261 "user-agent": [
262 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
263 ],
264 "accept": [
265 "application/mercurial-0.1"
266 ],
267 "host": [
268 "phab.mercurial-scm.org"
269 ]
270 },
271 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+1%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8434%22%7D",
272 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
273 "method": "POST"
274 }
275 },
276 {
277 "response": {
278 "headers": {
279 "referrer-policy": [
280 "no-referrer"
281 ],
282 "x-frame-options": [
283 "Deny"
284 ],
285 "date": [
286 "Wed, 15 Apr 2020 23:43:56 GMT"
287 ],
288 "server": [
289 "Apache/2.4.10 (Debian)"
290 ],
291 "content-type": [
292 "application/json"
293 ],
294 "expires": [
295 "Sat, 01 Jan 2000 00:00:00 GMT"
296 ],
297 "transfer-encoding": [
298 "chunked"
299 ],
300 "x-xss-protection": [
301 "1; mode=block"
302 ],
303 "strict-transport-security": [
304 "max-age=0; includeSubdomains; preload"
305 ],
306 "cache-control": [
307 "no-store"
308 ],
309 "x-content-type-options": [
310 "nosniff"
311 ]
312 },
313 "status": {
314 "code": 200,
315 "message": "OK"
316 },
317 "body": {
318 "string": "{\"result\":{\"object\":{\"id\":8434,\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\"},\"transactions\":[]},\"error_code\":null,\"error_info\":null}"
319 }
320 },
321 "request": {
322 "headers": {
323 "content-length": [
324 "251"
325 ],
326 "content-type": [
327 "application/x-www-form-urlencoded"
328 ],
329 "user-agent": [
330 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
331 ],
332 "accept": [
333 "application/mercurial-0.1"
334 ],
335 "host": [
336 "phab.mercurial-scm.org"
337 ]
338 },
339 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8434%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+1%22%7D%5D%7D",
340 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
341 "method": "POST"
342 }
343 },
344 {
345 "response": {
346 "headers": {
347 "referrer-policy": [
348 "no-referrer"
349 ],
350 "x-frame-options": [
351 "Deny"
352 ],
353 "date": [
354 "Wed, 15 Apr 2020 23:43:57 GMT"
355 ],
356 "server": [
357 "Apache/2.4.10 (Debian)"
358 ],
359 "content-type": [
360 "application/json"
361 ],
362 "expires": [
363 "Sat, 01 Jan 2000 00:00:00 GMT"
364 ],
365 "transfer-encoding": [
366 "chunked"
367 ],
368 "x-xss-protection": [
369 "1; mode=block"
370 ],
371 "strict-transport-security": [
372 "max-age=0; includeSubdomains; preload"
373 ],
374 "cache-control": [
375 "no-store"
376 ],
377 "x-content-type-options": [
378 "nosniff"
379 ]
380 },
381 "status": {
382 "code": 200,
383 "message": "OK"
384 },
385 "body": {
386 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
387 }
388 },
389 "request": {
390 "headers": {
391 "content-length": [
392 "488"
393 ],
394 "content-type": [
395 "application/x-www-form-urlencoded"
396 ],
397 "user-agent": [
398 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
399 ],
400 "accept": [
401 "application/mercurial-0.1"
402 ],
403 "host": [
404 "phab.mercurial-scm.org"
405 ]
406 },
407 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parent%5C%22%3A+%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
408 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
409 "method": "POST"
410 }
411 },
412 {
413 "response": {
414 "headers": {
415 "referrer-policy": [
416 "no-referrer"
417 ],
418 "x-frame-options": [
419 "Deny"
420 ],
421 "date": [
422 "Wed, 15 Apr 2020 23:43:57 GMT"
423 ],
424 "server": [
425 "Apache/2.4.10 (Debian)"
426 ],
427 "content-type": [
428 "application/json"
429 ],
430 "expires": [
431 "Sat, 01 Jan 2000 00:00:00 GMT"
432 ],
433 "transfer-encoding": [
434 "chunked"
435 ],
436 "x-xss-protection": [
437 "1; mode=block"
438 ],
439 "strict-transport-security": [
440 "max-age=0; includeSubdomains; preload"
441 ],
442 "cache-control": [
443 "no-store"
444 ],
445 "x-content-type-options": [
446 "nosniff"
447 ]
448 },
449 "status": {
450 "code": 200,
451 "message": "OK"
452 },
453 "body": {
454 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
455 }
456 },
457 "request": {
458 "headers": {
459 "content-length": [
460 "600"
461 ],
462 "content-type": [
463 "application/x-www-form-urlencoded"
464 ],
465 "user-agent": [
466 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
467 ],
468 "accept": [
469 "application/mercurial-0.1"
470 ],
471 "host": [
472 "phab.mercurial-scm.org"
473 ]
474 },
475 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%22876a60d024de493e35a1c6f963f2604056cdc0b9%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+%2221110%22%2C+%22name%22%3A+%22local%3Acommits%22%7D",
476 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
477 "method": "POST"
478 }
479 },
480 {
481 "response": {
482 "headers": {
483 "referrer-policy": [
484 "no-referrer"
485 ],
486 "x-frame-options": [
487 "Deny"
488 ],
489 "date": [
490 "Wed, 15 Apr 2020 23:43:58 GMT"
491 ],
492 "server": [
493 "Apache/2.4.10 (Debian)"
494 ],
495 "content-type": [
496 "application/json"
497 ],
498 "expires": [
499 "Sat, 01 Jan 2000 00:00:00 GMT"
500 ],
501 "transfer-encoding": [
502 "chunked"
503 ],
504 "x-xss-protection": [
505 "1; mode=block"
506 ],
507 "strict-transport-security": [
508 "max-age=0; includeSubdomains; preload"
509 ],
510 "cache-control": [
511 "no-store"
512 ],
513 "x-content-type-options": [
514 "nosniff"
515 ]
516 },
517 "status": {
518 "code": 200,
519 "message": "OK"
520 },
521 "body": {
522 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified 2\",\"revisionID\":8433},\"revisionIDFieldInfo\":{\"value\":8433,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified 2\"}]},\"error_code\":null,\"error_info\":null}"
523 }
524 },
525 "request": {
526 "headers": {
527 "content-length": [
528 "232"
529 ],
530 "content-type": [
531 "application/x-www-form-urlencoded"
532 ],
533 "user-agent": [
534 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
535 ],
536 "accept": [
537 "application/mercurial-0.1"
538 ],
539 "host": [
540 "phab.mercurial-scm.org"
541 ]
542 },
543 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+2%5Cn%5CnDifferential+Revision%3A+https%3A%2F%2Fphab.mercurial-scm.org%2FD8433%22%7D",
544 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
545 "method": "POST"
546 }
547 },
548 {
549 "response": {
550 "headers": {
551 "referrer-policy": [
552 "no-referrer"
553 ],
554 "x-frame-options": [
555 "Deny"
556 ],
557 "date": [
558 "Wed, 15 Apr 2020 23:43:58 GMT"
559 ],
560 "server": [
561 "Apache/2.4.10 (Debian)"
562 ],
563 "content-type": [
564 "application/json"
565 ],
566 "expires": [
567 "Sat, 01 Jan 2000 00:00:00 GMT"
568 ],
569 "transfer-encoding": [
570 "chunked"
571 ],
572 "x-xss-protection": [
573 "1; mode=block"
574 ],
575 "strict-transport-security": [
576 "max-age=0; includeSubdomains; preload"
577 ],
578 "cache-control": [
579 "no-store"
580 ],
581 "x-content-type-options": [
582 "nosniff"
583 ]
584 },
585 "status": {
586 "code": 200,
587 "message": "OK"
588 },
589 "body": {
590 "string": "{\"result\":{\"object\":{\"id\":8433,\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\"},\"transactions\":[]},\"error_code\":null,\"error_info\":null}"
591 }
592 },
593 "request": {
594 "headers": {
595 "content-length": [
596 "353"
597 ],
598 "content-type": [
599 "application/x-www-form-urlencoded"
600 ],
601 "user-agent": [
602 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
603 ],
604 "accept": [
605 "application/mercurial-0.1"
606 ],
607 "host": [
608 "phab.mercurial-scm.org"
609 ]
610 },
611 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+8433%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-l5ocnglddqa4hwbdzcky%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+2%22%7D%5D%7D",
612 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
613 "method": "POST"
614 }
615 },
616 {
617 "response": {
618 "headers": {
619 "referrer-policy": [
620 "no-referrer"
621 ],
622 "x-frame-options": [
623 "Deny"
624 ],
625 "date": [
626 "Wed, 15 Apr 2020 23:43:59 GMT"
627 ],
628 "server": [
629 "Apache/2.4.10 (Debian)"
630 ],
631 "content-type": [
632 "application/json"
633 ],
634 "expires": [
635 "Sat, 01 Jan 2000 00:00:00 GMT"
636 ],
637 "transfer-encoding": [
638 "chunked"
639 ],
640 "x-xss-protection": [
641 "1; mode=block"
642 ],
643 "strict-transport-security": [
644 "max-age=0; includeSubdomains; preload"
645 ],
646 "cache-control": [
647 "no-store"
648 ],
649 "x-content-type-options": [
650 "nosniff"
651 ]
652 },
653 "status": {
654 "code": 200,
655 "message": "OK"
656 },
657 "body": {
658 "string": "{\"result\":{\"data\":[{\"id\":2,\"type\":\"REPO\",\"phid\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"fields\":{\"name\":\"Mercurial\",\"vcs\":\"hg\",\"callsign\":\"HG\",\"shortName\":null,\"status\":\"active\",\"isImporting\":false,\"almanacServicePHID\":null,\"refRules\":{\"fetchRules\":[],\"trackRules\":[],\"permanentRefRules\":[]},\"spacePHID\":null,\"dateCreated\":1498761653,\"dateModified\":1500403184,\"policy\":{\"view\":\"public\",\"edit\":\"admin\",\"diffusion.push\":\"users\"}},\"attachments\":{}}],\"maps\":{},\"query\":{\"queryKey\":null},\"cursor\":{\"limit\":100,\"after\":null,\"before\":null,\"order\":null}},\"error_code\":null,\"error_info\":null}"
659 }
660 },
661 "request": {
662 "headers": {
663 "content-length": [
664 "183"
665 ],
666 "content-type": [
667 "application/x-www-form-urlencoded"
668 ],
669 "user-agent": [
670 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
671 ],
672 "accept": [
673 "application/mercurial-0.1"
674 ],
675 "host": [
676 "phab.mercurial-scm.org"
677 ]
678 },
679 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22constraints%22%3A+%7B%22callsigns%22%3A+%5B%22HG%22%5D%7D%7D",
680 "uri": "https://phab.mercurial-scm.org//api/diffusion.repository.search",
681 "method": "POST"
682 }
683 },
684 {
685 "response": {
686 "headers": {
687 "referrer-policy": [
688 "no-referrer"
689 ],
690 "x-frame-options": [
691 "Deny"
692 ],
693 "date": [
694 "Wed, 15 Apr 2020 23:43:59 GMT"
695 ],
696 "server": [
697 "Apache/2.4.10 (Debian)"
698 ],
699 "content-type": [
700 "application/json"
701 ],
702 "expires": [
703 "Sat, 01 Jan 2000 00:00:00 GMT"
704 ],
705 "transfer-encoding": [
706 "chunked"
707 ],
708 "x-xss-protection": [
709 "1; mode=block"
710 ],
711 "strict-transport-security": [
712 "max-age=0; includeSubdomains; preload"
713 ],
714 "cache-control": [
715 "no-store"
716 ],
717 "x-content-type-options": [
718 "nosniff"
719 ]
720 },
721 "status": {
722 "code": 200,
723 "message": "OK"
724 },
725 "body": {
726 "string": "{\"result\":{\"diffid\":21112,\"phid\":\"PHID-DIFF-5hagl525ogjltlaimw2a\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/differential\\/diff\\/21112\\/\"},\"error_code\":null,\"error_info\":null}"
727 }
728 },
729 "request": {
730 "headers": {
731 "content-length": [
732 "1166"
733 ],
734 "content-type": [
735 "application/x-www-form-urlencoded"
736 ],
737 "user-agent": [
738 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
739 ],
740 "accept": [
741 "application/mercurial-0.1"
742 ],
743 "host": [
744 "phab.mercurial-scm.org"
745 ]
746 },
747 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22bookmark%22%3A+null%2C+%22branch%22%3A+%22default%22%2C+%22changes%22%3A+%7B%22file1.txt%22%3A+%7B%22addLines%22%3A+1%2C+%22awayPaths%22%3A+%5B%5D%2C+%22commitHash%22%3A+null%2C+%22currentPath%22%3A+%22file1.txt%22%2C+%22delLines%22%3A+1%2C+%22fileType%22%3A+1%2C+%22hunks%22%3A+%5B%7B%22addLines%22%3A+1%2C+%22corpus%22%3A+%22-mod2%5Cn%2Bobsolete%5Cn%22%2C+%22delLines%22%3A+1%2C+%22newLength%22%3A+1%2C+%22newOffset%22%3A+1%2C+%22oldLength%22%3A+1%2C+%22oldOffset%22%3A+1%7D%5D%2C+%22metadata%22%3A+%7B%7D%2C+%22newProperties%22%3A+%7B%7D%2C+%22oldPath%22%3A+%22file1.txt%22%2C+%22oldProperties%22%3A+%7B%7D%2C+%22type%22%3A+2%7D%7D%2C+%22creationMethod%22%3A+%22phabsend%22%2C+%22lintStatus%22%3A+%22none%22%2C+%22repositoryPHID%22%3A+%22PHID-REPO-bvunnehri4u2isyr7bc3%22%2C+%22sourceControlBaseRevision%22%3A+%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%22%2C+%22sourceControlPath%22%3A+%22%2F%22%2C+%22sourceControlSystem%22%3A+%22hg%22%2C+%22sourceMachine%22%3A+%22%22%2C+%22sourcePath%22%3A+%22%2F%22%2C+%22unitStatus%22%3A+%22none%22%7D",
748 "uri": "https://phab.mercurial-scm.org//api/differential.creatediff",
749 "method": "POST"
750 }
751 },
752 {
753 "response": {
754 "headers": {
755 "referrer-policy": [
756 "no-referrer"
757 ],
758 "x-frame-options": [
759 "Deny"
760 ],
761 "date": [
762 "Wed, 15 Apr 2020 23:44:00 GMT"
763 ],
764 "server": [
765 "Apache/2.4.10 (Debian)"
766 ],
767 "content-type": [
768 "application/json"
769 ],
770 "expires": [
771 "Sat, 01 Jan 2000 00:00:00 GMT"
772 ],
773 "transfer-encoding": [
774 "chunked"
775 ],
776 "x-xss-protection": [
777 "1; mode=block"
778 ],
779 "strict-transport-security": [
780 "max-age=0; includeSubdomains; preload"
781 ],
782 "cache-control": [
783 "no-store"
784 ],
785 "x-content-type-options": [
786 "nosniff"
787 ]
788 },
789 "status": {
790 "code": 200,
791 "message": "OK"
792 },
793 "body": {
794 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
795 }
796 },
797 "request": {
798 "headers": {
799 "content-length": [
800 "482"
801 ],
802 "content-type": [
803 "application/x-www-form-urlencoded"
804 ],
805 "user-agent": [
806 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
807 ],
808 "accept": [
809 "application/mercurial-0.1"
810 ],
811 "host": [
812 "phab.mercurial-scm.org"
813 ]
814 },
815 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
816 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
817 "method": "POST"
818 }
819 },
820 {
821 "response": {
822 "headers": {
823 "referrer-policy": [
824 "no-referrer"
825 ],
826 "x-frame-options": [
827 "Deny"
828 ],
829 "date": [
830 "Wed, 15 Apr 2020 23:44:00 GMT"
831 ],
832 "server": [
833 "Apache/2.4.10 (Debian)"
834 ],
835 "content-type": [
836 "application/json"
837 ],
838 "expires": [
839 "Sat, 01 Jan 2000 00:00:00 GMT"
840 ],
841 "transfer-encoding": [
842 "chunked"
843 ],
844 "x-xss-protection": [
845 "1; mode=block"
846 ],
847 "strict-transport-security": [
848 "max-age=0; includeSubdomains; preload"
849 ],
850 "cache-control": [
851 "no-store"
852 ],
853 "x-content-type-options": [
854 "nosniff"
855 ]
856 },
857 "status": {
858 "code": 200,
859 "message": "OK"
860 },
861 "body": {
862 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
863 }
864 },
865 "request": {
866 "headers": {
867 "content-length": [
868 "594"
869 ],
870 "content-type": [
871 "application/x-www-form-urlencoded"
872 ],
873 "user-agent": [
874 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
875 ],
876 "accept": [
877 "application/mercurial-0.1"
878 ],
879 "host": [
880 "phab.mercurial-scm.org"
881 ]
882 },
883 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22082be6c9415073eee91d1f5b330b93e1fefd7627%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22local%3Acommits%22%7D",
884 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
885 "method": "POST"
886 }
887 },
888 {
889 "response": {
890 "headers": {
891 "referrer-policy": [
892 "no-referrer"
893 ],
894 "x-frame-options": [
895 "Deny"
896 ],
897 "date": [
898 "Wed, 15 Apr 2020 23:44:01 GMT"
899 ],
900 "server": [
901 "Apache/2.4.10 (Debian)"
902 ],
903 "content-type": [
904 "application/json"
905 ],
906 "expires": [
907 "Sat, 01 Jan 2000 00:00:00 GMT"
908 ],
909 "transfer-encoding": [
910 "chunked"
911 ],
912 "x-xss-protection": [
913 "1; mode=block"
914 ],
915 "strict-transport-security": [
916 "max-age=0; includeSubdomains; preload"
917 ],
918 "cache-control": [
919 "no-store"
920 ],
921 "x-content-type-options": [
922 "nosniff"
923 ]
924 },
925 "status": {
926 "code": 200,
927 "message": "OK"
928 },
929 "body": {
930 "string": "{\"result\":{\"errors\":[],\"fields\":{\"title\":\"modified A\"},\"revisionIDFieldInfo\":{\"value\":null,\"validDomain\":\"https:\\/\\/phab.mercurial-scm.org\"},\"transactions\":[{\"type\":\"title\",\"value\":\"modified A\"}]},\"error_code\":null,\"error_info\":null}"
931 }
932 },
933 "request": {
934 "headers": {
935 "content-length": [
936 "155"
937 ],
938 "content-type": [
939 "application/x-www-form-urlencoded"
940 ],
941 "user-agent": [
942 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
943 ],
944 "accept": [
945 "application/mercurial-0.1"
946 ],
947 "host": [
948 "phab.mercurial-scm.org"
949 ]
950 },
951 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22corpus%22%3A+%22modified+A%22%7D",
952 "uri": "https://phab.mercurial-scm.org//api/differential.parsecommitmessage",
953 "method": "POST"
954 }
955 },
956 {
957 "response": {
958 "headers": {
959 "referrer-policy": [
960 "no-referrer"
961 ],
962 "x-frame-options": [
963 "Deny"
964 ],
965 "date": [
966 "Wed, 15 Apr 2020 23:44:01 GMT"
967 ],
968 "server": [
969 "Apache/2.4.10 (Debian)"
970 ],
971 "content-type": [
972 "application/json"
973 ],
974 "expires": [
975 "Sat, 01 Jan 2000 00:00:00 GMT"
976 ],
977 "transfer-encoding": [
978 "chunked"
979 ],
980 "x-xss-protection": [
981 "1; mode=block"
982 ],
983 "strict-transport-security": [
984 "max-age=0; includeSubdomains; preload"
985 ],
986 "cache-control": [
987 "no-store"
988 ],
989 "x-content-type-options": [
990 "nosniff"
991 ]
992 },
993 "status": {
994 "code": 200,
995 "message": "OK"
996 },
997 "body": {
998 "string": "{\"result\":{\"object\":{\"id\":8435,\"phid\":\"PHID-DREV-wn3f4ni4p5n6juwqeskr\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-6pr73p6guxsdz4p\"},{\"phid\":\"PHID-XACT-DREV-qpigriq2bzc6xgf\"},{\"phid\":\"PHID-XACT-DREV-czslffarxz4pbmz\"},{\"phid\":\"PHID-XACT-DREV-4ahvaedacwbzzyv\"},{\"phid\":\"PHID-XACT-DREV-kmjjhdw47orongi\"},{\"phid\":\"PHID-XACT-DREV-65ahhu2fov5rwog\"}]},\"error_code\":null,\"error_info\":null}"
999 }
1000 },
1001 "request": {
1002 "headers": {
1003 "content-length": [
1004 "410"
1005 ],
1006 "content-type": [
1007 "application/x-www-form-urlencoded"
1008 ],
1009 "user-agent": [
1010 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
1011 ],
1012 "accept": [
1013 "application/mercurial-0.1"
1014 ],
1015 "host": [
1016 "phab.mercurial-scm.org"
1017 ]
1018 },
1019 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22update%22%2C+%22value%22%3A+%22PHID-DIFF-5hagl525ogjltlaimw2a%22%7D%2C+%7B%22type%22%3A+%22parents.set%22%2C+%22value%22%3A+%5B%22PHID-DREV-kpkwhtylyxrzikfspl5r%22%5D%7D%2C+%7B%22type%22%3A+%22title%22%2C+%22value%22%3A+%22modified+A%22%7D%5D%7D",
1020 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
1021 "method": "POST"
1022 }
1023 },
1024 {
1025 "response": {
1026 "headers": {
1027 "referrer-policy": [
1028 "no-referrer"
1029 ],
1030 "x-frame-options": [
1031 "Deny"
1032 ],
1033 "date": [
1034 "Wed, 15 Apr 2020 23:44:02 GMT"
1035 ],
1036 "server": [
1037 "Apache/2.4.10 (Debian)"
1038 ],
1039 "content-type": [
1040 "application/json"
1041 ],
1042 "expires": [
1043 "Sat, 01 Jan 2000 00:00:00 GMT"
1044 ],
1045 "transfer-encoding": [
1046 "chunked"
1047 ],
1048 "x-xss-protection": [
1049 "1; mode=block"
1050 ],
1051 "strict-transport-security": [
1052 "max-age=0; includeSubdomains; preload"
1053 ],
1054 "cache-control": [
1055 "no-store"
1056 ],
1057 "x-content-type-options": [
1058 "nosniff"
1059 ]
1060 },
1061 "status": {
1062 "code": 200,
1063 "message": "OK"
1064 },
1065 "body": {
1066 "string": "{\"result\":[{\"id\":\"8435\",\"phid\":\"PHID-DREV-wn3f4ni4p5n6juwqeskr\",\"title\":\"modified A\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8435\",\"dateCreated\":\"1586994241\",\"dateModified\":\"1586994241\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-5hagl525ogjltlaimw2a\",\"diffs\":[\"21112\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-kpkwhtylyxrzikfspl5r\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8433\",\"phid\":\"PHID-DREV-kpkwhtylyxrzikfspl5r\",\"title\":\"modified 2\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8433\",\"dateCreated\":\"1586994221\",\"dateModified\":\"1586994241\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-g25jdc5b5khduwpp3p3b\",\"diffs\":[\"21110\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-l5ocnglddqa4hwbdzcky\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"8434\",\"phid\":\"PHID-DREV-l5ocnglddqa4hwbdzcky\",\"title\":\"modified 1\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D8434\",\"dateCreated\":\"1586994227\",\"dateModified\":\"1586994236\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"0\",\"statusName\":\"Needs 
Review\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-qat4sqpqqvytzhf7rpti\",\"diffs\":[\"21111\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
1067 }
1068 },
1069 "request": {
1070 "headers": {
1071 "content-length": [
1072 "162"
1073 ],
1074 "content-type": [
1075 "application/x-www-form-urlencoded"
1076 ],
1077 "user-agent": [
1078 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
1079 ],
1080 "accept": [
1081 "application/mercurial-0.1"
1082 ],
1083 "host": [
1084 "phab.mercurial-scm.org"
1085 ]
1086 },
1087 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B8434%2C+8433%2C+8435%5D%7D",
1088 "uri": "https://phab.mercurial-scm.org//api/differential.query",
1089 "method": "POST"
1090 }
1091 },
1092 {
1093 "response": {
1094 "headers": {
1095 "referrer-policy": [
1096 "no-referrer"
1097 ],
1098 "x-frame-options": [
1099 "Deny"
1100 ],
1101 "date": [
1102 "Wed, 15 Apr 2020 23:44:02 GMT"
1103 ],
1104 "server": [
1105 "Apache/2.4.10 (Debian)"
1106 ],
1107 "content-type": [
1108 "application/json"
1109 ],
1110 "expires": [
1111 "Sat, 01 Jan 2000 00:00:00 GMT"
1112 ],
1113 "transfer-encoding": [
1114 "chunked"
1115 ],
1116 "x-xss-protection": [
1117 "1; mode=block"
1118 ],
1119 "strict-transport-security": [
1120 "max-age=0; includeSubdomains; preload"
1121 ],
1122 "cache-control": [
1123 "no-store"
1124 ],
1125 "x-content-type-options": [
1126 "nosniff"
1127 ]
1128 },
1129 "status": {
1130 "code": 200,
1131 "message": "OK"
1132 },
1133 "body": {
1134 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1135 }
1136 },
1137 "request": {
1138 "headers": {
1139 "content-length": [
1140 "482"
1141 ],
1142 "content-type": [
1143 "application/x-www-form-urlencoded"
1144 ],
1145 "user-agent": [
1146 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
1147 ],
1148 "accept": [
1149 "application/mercurial-0.1"
1150 ],
1151 "host": [
1152 "phab.mercurial-scm.org"
1153 ]
1154 },
1155 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22date%5C%22%3A+%5C%220+0%5C%22%2C+%5C%22node%5C%22%3A+%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%2C+%5C%22parent%5C%22%3A+%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%2C+%5C%22user%5C%22%3A+%5C%22test%5C%22%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22hg%3Ameta%22%7D",
1156 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1157 "method": "POST"
1158 }
1159 },
1160 {
1161 "response": {
1162 "headers": {
1163 "referrer-policy": [
1164 "no-referrer"
1165 ],
1166 "x-frame-options": [
1167 "Deny"
1168 ],
1169 "date": [
1170 "Wed, 15 Apr 2020 23:44:03 GMT"
1171 ],
1172 "server": [
1173 "Apache/2.4.10 (Debian)"
1174 ],
1175 "content-type": [
1176 "application/json"
1177 ],
1178 "expires": [
1179 "Sat, 01 Jan 2000 00:00:00 GMT"
1180 ],
1181 "transfer-encoding": [
1182 "chunked"
1183 ],
1184 "x-xss-protection": [
1185 "1; mode=block"
1186 ],
1187 "strict-transport-security": [
1188 "max-age=0; includeSubdomains; preload"
1189 ],
1190 "cache-control": [
1191 "no-store"
1192 ],
1193 "x-content-type-options": [
1194 "nosniff"
1195 ]
1196 },
1197 "status": {
1198 "code": 200,
1199 "message": "OK"
1200 },
1201 "body": {
1202 "string": "{\"result\":null,\"error_code\":null,\"error_info\":null}"
1203 }
1204 },
1205 "request": {
1206 "headers": {
1207 "content-length": [
1208 "594"
1209 ],
1210 "content-type": [
1211 "application/x-www-form-urlencoded"
1212 ],
1213 "user-agent": [
1214 "mercurial/proto-1.0 (Mercurial 5.3.2+497-112d1ad30f88)"
1215 ],
1216 "accept": [
1217 "application/mercurial-0.1"
1218 ],
1219 "host": [
1220 "phab.mercurial-scm.org"
1221 ]
1222 },
1223 "body": "output=json&__conduit__=1&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22data%22%3A+%22%7B%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%3A+%7B%5C%22author%5C%22%3A+%5C%22test%5C%22%2C+%5C%22authorEmail%5C%22%3A+%5C%22test%5C%22%2C+%5C%22branch%5C%22%3A+%5C%22default%5C%22%2C+%5C%22commit%5C%22%3A+%5C%22b5913193c805e46620181263d5175a36ee74c48a%5C%22%2C+%5C%22parents%5C%22%3A+%5B%5C%220c6523cb1d0f560a958bcc0f4f938c91cb1141dc%5C%22%5D%2C+%5C%22time%5C%22%3A+0%7D%7D%22%2C+%22diff_id%22%3A+21112%2C+%22name%22%3A+%22local%3Acommits%22%7D",
1224 "uri": "https://phab.mercurial-scm.org//api/differential.setdiffproperty",
1225 "method": "POST"
1226 }
1227 }
1228 ]
1229 } No newline at end of file
@@ -1,2226 +1,2272 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
99
99
100 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
101
101
102 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
103 command = eh.command
103 command = eh.command
104 configtable = eh.configtable
104 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
107
107
108 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
109 eh.configitem(
109 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
111 )
111 )
112 eh.configitem(
112 eh.configitem(
113 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
114 )
114 )
115 eh.configitem(
115 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
117 )
117 )
118 # developer config: phabricator.debug
118 # developer config: phabricator.debug
119 eh.configitem(
119 eh.configitem(
120 b'phabricator', b'debug', default=False,
120 b'phabricator', b'debug', default=False,
121 )
121 )
122 # developer config: phabricator.repophid
122 # developer config: phabricator.repophid
123 eh.configitem(
123 eh.configitem(
124 b'phabricator', b'repophid', default=None,
124 b'phabricator', b'repophid', default=None,
125 )
125 )
126 eh.configitem(
126 eh.configitem(
127 b'phabricator', b'url', default=None,
127 b'phabricator', b'url', default=None,
128 )
128 )
129 eh.configitem(
129 eh.configitem(
130 b'phabsend', b'confirm', default=False,
130 b'phabsend', b'confirm', default=False,
131 )
131 )
132 eh.configitem(
132 eh.configitem(
133 b'phabimport', b'secret', default=False,
133 b'phabimport', b'secret', default=False,
134 )
134 )
135 eh.configitem(
135 eh.configitem(
136 b'phabimport', b'obsolete', default=False,
136 b'phabimport', b'obsolete', default=False,
137 )
137 )
138
138
139 colortable = {
139 colortable = {
140 b'phabricator.action.created': b'green',
140 b'phabricator.action.created': b'green',
141 b'phabricator.action.skipped': b'magenta',
141 b'phabricator.action.skipped': b'magenta',
142 b'phabricator.action.updated': b'magenta',
142 b'phabricator.action.updated': b'magenta',
143 b'phabricator.desc': b'',
143 b'phabricator.desc': b'',
144 b'phabricator.drev': b'bold',
144 b'phabricator.drev': b'bold',
145 b'phabricator.node': b'',
145 b'phabricator.node': b'',
146 b'phabricator.status.abandoned': b'magenta dim',
146 b'phabricator.status.abandoned': b'magenta dim',
147 b'phabricator.status.accepted': b'green bold',
147 b'phabricator.status.accepted': b'green bold',
148 b'phabricator.status.closed': b'green',
148 b'phabricator.status.closed': b'green',
149 b'phabricator.status.needsreview': b'yellow',
149 b'phabricator.status.needsreview': b'yellow',
150 b'phabricator.status.needsrevision': b'red',
150 b'phabricator.status.needsrevision': b'red',
151 b'phabricator.status.changesplanned': b'red',
151 b'phabricator.status.changesplanned': b'red',
152 }
152 }
153
153
154 _VCR_FLAGS = [
154 _VCR_FLAGS = [
155 (
155 (
156 b'',
156 b'',
157 b'test-vcr',
157 b'test-vcr',
158 b'',
158 b'',
159 _(
159 _(
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
161 b', otherwise will mock all http requests using the specified vcr file.'
161 b', otherwise will mock all http requests using the specified vcr file.'
162 b' (ADVANCED)'
162 b' (ADVANCED)'
163 ),
163 ),
164 ),
164 ),
165 ]
165 ]
166
166
167
167
168 @eh.wrapfunction(localrepo, "loadhgrc")
168 @eh.wrapfunction(localrepo, "loadhgrc")
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
170 """Load ``.arcconfig`` content into a ui instance on repository open.
170 """Load ``.arcconfig`` content into a ui instance on repository open.
171 """
171 """
172 result = False
172 result = False
173 arcconfig = {}
173 arcconfig = {}
174
174
175 try:
175 try:
176 # json.loads only accepts bytes from 3.6+
176 # json.loads only accepts bytes from 3.6+
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
178 # json.loads only returns unicode strings
178 # json.loads only returns unicode strings
179 arcconfig = pycompat.rapply(
179 arcconfig = pycompat.rapply(
180 lambda x: encoding.unitolocal(x)
180 lambda x: encoding.unitolocal(x)
181 if isinstance(x, pycompat.unicode)
181 if isinstance(x, pycompat.unicode)
182 else x,
182 else x,
183 pycompat.json_loads(rawparams),
183 pycompat.json_loads(rawparams),
184 )
184 )
185
185
186 result = True
186 result = True
187 except ValueError:
187 except ValueError:
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
189 except IOError:
189 except IOError:
190 pass
190 pass
191
191
192 cfg = util.sortdict()
192 cfg = util.sortdict()
193
193
194 if b"repository.callsign" in arcconfig:
194 if b"repository.callsign" in arcconfig:
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
196
196
197 if b"phabricator.uri" in arcconfig:
197 if b"phabricator.uri" in arcconfig:
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
199
199
200 if cfg:
200 if cfg:
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
202
202
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
204
204
205
205
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
207 fullflags = flags + _VCR_FLAGS
207 fullflags = flags + _VCR_FLAGS
208
208
209 def hgmatcher(r1, r2):
209 def hgmatcher(r1, r2):
210 if r1.uri != r2.uri or r1.method != r2.method:
210 if r1.uri != r2.uri or r1.method != r2.method:
211 return False
211 return False
212 r1params = util.urlreq.parseqs(r1.body)
212 r1params = util.urlreq.parseqs(r1.body)
213 r2params = util.urlreq.parseqs(r2.body)
213 r2params = util.urlreq.parseqs(r2.body)
214 for key in r1params:
214 for key in r1params:
215 if key not in r2params:
215 if key not in r2params:
216 return False
216 return False
217 value = r1params[key][0]
217 value = r1params[key][0]
218 # we want to compare json payloads without worrying about ordering
218 # we want to compare json payloads without worrying about ordering
219 if value.startswith(b'{') and value.endswith(b'}'):
219 if value.startswith(b'{') and value.endswith(b'}'):
220 r1json = pycompat.json_loads(value)
220 r1json = pycompat.json_loads(value)
221 r2json = pycompat.json_loads(r2params[key][0])
221 r2json = pycompat.json_loads(r2params[key][0])
222 if r1json != r2json:
222 if r1json != r2json:
223 return False
223 return False
224 elif r2params[key][0] != value:
224 elif r2params[key][0] != value:
225 return False
225 return False
226 return True
226 return True
227
227
228 def sanitiserequest(request):
228 def sanitiserequest(request):
229 request.body = re.sub(
229 request.body = re.sub(
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
231 )
231 )
232 return request
232 return request
233
233
234 def sanitiseresponse(response):
234 def sanitiseresponse(response):
235 if 'set-cookie' in response['headers']:
235 if 'set-cookie' in response['headers']:
236 del response['headers']['set-cookie']
236 del response['headers']['set-cookie']
237 return response
237 return response
238
238
239 def decorate(fn):
239 def decorate(fn):
240 def inner(*args, **kwargs):
240 def inner(*args, **kwargs):
241 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
241 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
242 if cassette:
242 if cassette:
243 import hgdemandimport
243 import hgdemandimport
244
244
245 with hgdemandimport.deactivated():
245 with hgdemandimport.deactivated():
246 import vcr as vcrmod
246 import vcr as vcrmod
247 import vcr.stubs as stubs
247 import vcr.stubs as stubs
248
248
249 vcr = vcrmod.VCR(
249 vcr = vcrmod.VCR(
250 serializer='json',
250 serializer='json',
251 before_record_request=sanitiserequest,
251 before_record_request=sanitiserequest,
252 before_record_response=sanitiseresponse,
252 before_record_response=sanitiseresponse,
253 custom_patches=[
253 custom_patches=[
254 (
254 (
255 urlmod,
255 urlmod,
256 'httpconnection',
256 'httpconnection',
257 stubs.VCRHTTPConnection,
257 stubs.VCRHTTPConnection,
258 ),
258 ),
259 (
259 (
260 urlmod,
260 urlmod,
261 'httpsconnection',
261 'httpsconnection',
262 stubs.VCRHTTPSConnection,
262 stubs.VCRHTTPSConnection,
263 ),
263 ),
264 ],
264 ],
265 )
265 )
266 vcr.register_matcher('hgmatcher', hgmatcher)
266 vcr.register_matcher('hgmatcher', hgmatcher)
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
268 return fn(*args, **kwargs)
268 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
270
270
271 cmd = util.checksignature(inner, depth=2)
271 cmd = util.checksignature(inner, depth=2)
272 cmd.__name__ = fn.__name__
272 cmd.__name__ = fn.__name__
273 cmd.__doc__ = fn.__doc__
273 cmd.__doc__ = fn.__doc__
274
274
275 return command(
275 return command(
276 name,
276 name,
277 fullflags,
277 fullflags,
278 spec,
278 spec,
279 helpcategory=helpcategory,
279 helpcategory=helpcategory,
280 optionalrepo=optionalrepo,
280 optionalrepo=optionalrepo,
281 )(cmd)
281 )(cmd)
282
282
283 return decorate
283 return decorate
284
284
285
285
286 def _debug(ui, *msg, **opts):
286 def _debug(ui, *msg, **opts):
287 """write debug output for Phabricator if ``phabricator.debug`` is set
287 """write debug output for Phabricator if ``phabricator.debug`` is set
288
288
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
290 printed with the --debug argument.
290 printed with the --debug argument.
291 """
291 """
292 if ui.configbool(b"phabricator", b"debug"):
292 if ui.configbool(b"phabricator", b"debug"):
293 flag = ui.debugflag
293 flag = ui.debugflag
294 try:
294 try:
295 ui.debugflag = True
295 ui.debugflag = True
296 ui.write(*msg, **opts)
296 ui.write(*msg, **opts)
297 finally:
297 finally:
298 ui.debugflag = flag
298 ui.debugflag = flag
299
299
300
300
301 def urlencodenested(params):
301 def urlencodenested(params):
302 """like urlencode, but works with nested parameters.
302 """like urlencode, but works with nested parameters.
303
303
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
307 """
307 """
308 flatparams = util.sortdict()
308 flatparams = util.sortdict()
309
309
310 def process(prefix, obj):
310 def process(prefix, obj):
311 if isinstance(obj, bool):
311 if isinstance(obj, bool):
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
315 if items is None:
315 if items is None:
316 flatparams[prefix] = obj
316 flatparams[prefix] = obj
317 else:
317 else:
318 for k, v in items(obj):
318 for k, v in items(obj):
319 if prefix:
319 if prefix:
320 process(b'%s[%s]' % (prefix, k), v)
320 process(b'%s[%s]' % (prefix, k), v)
321 else:
321 else:
322 process(k, v)
322 process(k, v)
323
323
324 process(b'', params)
324 process(b'', params)
325 return util.urlreq.urlencode(flatparams)
325 return util.urlreq.urlencode(flatparams)
326
326
327
327
328 def readurltoken(ui):
328 def readurltoken(ui):
329 """return conduit url, token and make sure they exist
329 """return conduit url, token and make sure they exist
330
330
331 Currently read from [auth] config section. In the future, it might
331 Currently read from [auth] config section. In the future, it might
332 make sense to read from .arcconfig and .arcrc as well.
332 make sense to read from .arcconfig and .arcrc as well.
333 """
333 """
334 url = ui.config(b'phabricator', b'url')
334 url = ui.config(b'phabricator', b'url')
335 if not url:
335 if not url:
336 raise error.Abort(
336 raise error.Abort(
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
338 )
338 )
339
339
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
341 token = None
341 token = None
342
342
343 if res:
343 if res:
344 group, auth = res
344 group, auth = res
345
345
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
347
347
348 token = auth.get(b'phabtoken')
348 token = auth.get(b'phabtoken')
349
349
350 if not token:
350 if not token:
351 raise error.Abort(
351 raise error.Abort(
352 _(b'Can\'t find conduit token associated to %s') % (url,)
352 _(b'Can\'t find conduit token associated to %s') % (url,)
353 )
353 )
354
354
355 return url, token
355 return url, token
356
356
357
357
358 def callconduit(ui, name, params):
358 def callconduit(ui, name, params):
359 """call Conduit API, params is a dict. return json.loads result, or None"""
359 """call Conduit API, params is a dict. return json.loads result, or None"""
360 host, token = readurltoken(ui)
360 host, token = readurltoken(ui)
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
363 params = params.copy()
363 params = params.copy()
364 params[b'__conduit__'] = {
364 params[b'__conduit__'] = {
365 b'token': token,
365 b'token': token,
366 }
366 }
367 rawdata = {
367 rawdata = {
368 b'params': templatefilters.json(params),
368 b'params': templatefilters.json(params),
369 b'output': b'json',
369 b'output': b'json',
370 b'__conduit__': 1,
370 b'__conduit__': 1,
371 }
371 }
372 data = urlencodenested(rawdata)
372 data = urlencodenested(rawdata)
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
374 if curlcmd:
374 if curlcmd:
375 sin, sout = procutil.popen2(
375 sin, sout = procutil.popen2(
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
377 )
377 )
378 sin.write(data)
378 sin.write(data)
379 sin.close()
379 sin.close()
380 body = sout.read()
380 body = sout.read()
381 else:
381 else:
382 urlopener = urlmod.opener(ui, authinfo)
382 urlopener = urlmod.opener(ui, authinfo)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
384 with contextlib.closing(urlopener.open(request)) as rsp:
384 with contextlib.closing(urlopener.open(request)) as rsp:
385 body = rsp.read()
385 body = rsp.read()
386 ui.debug(b'Conduit Response: %s\n' % body)
386 ui.debug(b'Conduit Response: %s\n' % body)
387 parsed = pycompat.rapply(
387 parsed = pycompat.rapply(
388 lambda x: encoding.unitolocal(x)
388 lambda x: encoding.unitolocal(x)
389 if isinstance(x, pycompat.unicode)
389 if isinstance(x, pycompat.unicode)
390 else x,
390 else x,
391 # json.loads only accepts bytes from py3.6+
391 # json.loads only accepts bytes from py3.6+
392 pycompat.json_loads(encoding.unifromlocal(body)),
392 pycompat.json_loads(encoding.unifromlocal(body)),
393 )
393 )
394 if parsed.get(b'error_code'):
394 if parsed.get(b'error_code'):
395 msg = _(b'Conduit Error (%s): %s') % (
395 msg = _(b'Conduit Error (%s): %s') % (
396 parsed[b'error_code'],
396 parsed[b'error_code'],
397 parsed[b'error_info'],
397 parsed[b'error_info'],
398 )
398 )
399 raise error.Abort(msg)
399 raise error.Abort(msg)
400 return parsed[b'result']
400 return parsed[b'result']
401
401
402
402
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
404 def debugcallconduit(ui, repo, name):
404 def debugcallconduit(ui, repo, name):
405 """call Conduit API
405 """call Conduit API
406
406
407 Call parameters are read from stdin as a JSON blob. Result will be written
407 Call parameters are read from stdin as a JSON blob. Result will be written
408 to stdout as a JSON blob.
408 to stdout as a JSON blob.
409 """
409 """
410 # json.loads only accepts bytes from 3.6+
410 # json.loads only accepts bytes from 3.6+
411 rawparams = encoding.unifromlocal(ui.fin.read())
411 rawparams = encoding.unifromlocal(ui.fin.read())
412 # json.loads only returns unicode strings
412 # json.loads only returns unicode strings
413 params = pycompat.rapply(
413 params = pycompat.rapply(
414 lambda x: encoding.unitolocal(x)
414 lambda x: encoding.unitolocal(x)
415 if isinstance(x, pycompat.unicode)
415 if isinstance(x, pycompat.unicode)
416 else x,
416 else x,
417 pycompat.json_loads(rawparams),
417 pycompat.json_loads(rawparams),
418 )
418 )
419 # json.dumps only accepts unicode strings
419 # json.dumps only accepts unicode strings
420 result = pycompat.rapply(
420 result = pycompat.rapply(
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
422 callconduit(ui, name, params),
422 callconduit(ui, name, params),
423 )
423 )
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
426
426
427
427
428 def getrepophid(repo):
428 def getrepophid(repo):
429 """given callsign, return repository PHID or None"""
429 """given callsign, return repository PHID or None"""
430 # developer config: phabricator.repophid
430 # developer config: phabricator.repophid
431 repophid = repo.ui.config(b'phabricator', b'repophid')
431 repophid = repo.ui.config(b'phabricator', b'repophid')
432 if repophid:
432 if repophid:
433 return repophid
433 return repophid
434 callsign = repo.ui.config(b'phabricator', b'callsign')
434 callsign = repo.ui.config(b'phabricator', b'callsign')
435 if not callsign:
435 if not callsign:
436 return None
436 return None
437 query = callconduit(
437 query = callconduit(
438 repo.ui,
438 repo.ui,
439 b'diffusion.repository.search',
439 b'diffusion.repository.search',
440 {b'constraints': {b'callsigns': [callsign]}},
440 {b'constraints': {b'callsigns': [callsign]}},
441 )
441 )
442 if len(query[b'data']) == 0:
442 if len(query[b'data']) == 0:
443 return None
443 return None
444 repophid = query[b'data'][0][b'phid']
444 repophid = query[b'data'][0][b'phid']
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
446 return repophid
446 return repophid
447
447
448
448
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
450 _differentialrevisiondescre = re.compile(
450 _differentialrevisiondescre = re.compile(
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
452 )
452 )
453
453
454
454
455 def getoldnodedrevmap(repo, nodelist):
455 def getoldnodedrevmap(repo, nodelist):
456 """find previous nodes that has been sent to Phabricator
456 """find previous nodes that has been sent to Phabricator
457
457
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
459 for node in nodelist with known previous sent versions, or associated
459 for node in nodelist with known previous sent versions, or associated
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
461 be ``None``.
461 be ``None``.
462
462
463 Examines commit messages like "Differential Revision:" to get the
463 Examines commit messages like "Differential Revision:" to get the
464 association information.
464 association information.
465
465
466 If such commit message line is not found, examines all precursors and their
466 If such commit message line is not found, examines all precursors and their
467 tags. Tags with format like "D1234" are considered a match and the node
467 tags. Tags with format like "D1234" are considered a match and the node
468 with that tag, and the number after "D" (ex. 1234) will be returned.
468 with that tag, and the number after "D" (ex. 1234) will be returned.
469
469
470 The ``old node``, if not None, is guaranteed to be the last diff of
470 The ``old node``, if not None, is guaranteed to be the last diff of
471 corresponding Differential Revision, and exist in the repo.
471 corresponding Differential Revision, and exist in the repo.
472 """
472 """
473 unfi = repo.unfiltered()
473 unfi = repo.unfiltered()
474 has_node = unfi.changelog.index.has_node
474 has_node = unfi.changelog.index.has_node
475
475
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
477 # ordered for test stability when printing new -> old mapping below
477 # ordered for test stability when printing new -> old mapping below
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
479 for node in nodelist:
479 for node in nodelist:
480 ctx = unfi[node]
480 ctx = unfi[node]
481 # For tags like "D123", put them into "toconfirm" to verify later
481 # For tags like "D123", put them into "toconfirm" to verify later
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
483 for n in precnodes:
483 for n in precnodes:
484 if has_node(n):
484 if has_node(n):
485 for tag in unfi.nodetags(n):
485 for tag in unfi.nodetags(n):
486 m = _differentialrevisiontagre.match(tag)
486 m = _differentialrevisiontagre.match(tag)
487 if m:
487 if m:
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
489 break
489 break
490 else:
490 else:
491 continue # move to next predecessor
491 continue # move to next predecessor
492 break # found a tag, stop
492 break # found a tag, stop
493 else:
493 else:
494 # Check commit message
494 # Check commit message
495 m = _differentialrevisiondescre.search(ctx.description())
495 m = _differentialrevisiondescre.search(ctx.description())
496 if m:
496 if m:
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
498
498
499 # Double check if tags are genuine by collecting all old nodes from
499 # Double check if tags are genuine by collecting all old nodes from
500 # Phabricator, and expect precursors overlap with it.
500 # Phabricator, and expect precursors overlap with it.
501 if toconfirm:
501 if toconfirm:
502 drevs = [drev for force, precs, drev in toconfirm.values()]
502 drevs = [drev for force, precs, drev in toconfirm.values()]
503 alldiffs = callconduit(
503 alldiffs = callconduit(
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
505 )
505 )
506
506
507 def getnodes(d, precset):
507 def getnodes(d, precset):
508 # Ignore other nodes that were combined into the Differential
508 # Ignore other nodes that were combined into the Differential
509 # that aren't predecessors of the current local node.
509 # that aren't predecessors of the current local node.
510 return [n for n in getlocalcommits(d) if n in precset]
510 return [n for n in getlocalcommits(d) if n in precset]
511
511
512 for newnode, (force, precset, drev) in toconfirm.items():
512 for newnode, (force, precset, drev) in toconfirm.items():
513 diffs = [
513 diffs = [
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
515 ]
515 ]
516
516
517 # local predecessors known by Phabricator
517 # local predecessors known by Phabricator
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
519
519
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
521 # and force is not set (when commit message says nothing)
521 # and force is not set (when commit message says nothing)
522 if not force and not phprecset:
522 if not force and not phprecset:
523 tagname = b'D%d' % drev
523 tagname = b'D%d' % drev
524 tags.tag(
524 tags.tag(
525 repo,
525 repo,
526 tagname,
526 tagname,
527 nullid,
527 nullid,
528 message=None,
528 message=None,
529 user=None,
529 user=None,
530 date=None,
530 date=None,
531 local=True,
531 local=True,
532 )
532 )
533 unfi.ui.warn(
533 unfi.ui.warn(
534 _(
534 _(
535 b'D%d: local tag removed - does not match '
535 b'D%d: local tag removed - does not match '
536 b'Differential history\n'
536 b'Differential history\n'
537 )
537 )
538 % drev
538 % drev
539 )
539 )
540 continue
540 continue
541
541
542 # Find the last node using Phabricator metadata, and make sure it
542 # Find the last node using Phabricator metadata, and make sure it
543 # exists in the repo
543 # exists in the repo
544 oldnode = lastdiff = None
544 oldnode = lastdiff = None
545 if diffs:
545 if diffs:
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
547 oldnodes = getnodes(lastdiff, precset)
547 oldnodes = getnodes(lastdiff, precset)
548
548
549 _debug(
549 _debug(
550 unfi.ui,
550 unfi.ui,
551 b"%s mapped to old nodes %s\n"
551 b"%s mapped to old nodes %s\n"
552 % (
552 % (
553 short(newnode),
553 short(newnode),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
555 ),
555 ),
556 )
556 )
557
557
558 # If this commit was the result of `hg fold` after submission,
558 # If this commit was the result of `hg fold` after submission,
559 # and now resubmitted with --fold, the easiest thing to do is
559 # and now resubmitted with --fold, the easiest thing to do is
560 # to leave the node clear. This only results in creating a new
560 # to leave the node clear. This only results in creating a new
561 # diff for the _same_ Differential Revision if this commit is
561 # diff for the _same_ Differential Revision if this commit is
562 # the first or last in the selected range. If we picked a node
562 # the first or last in the selected range. If we picked a node
563 # from the list instead, it would have to be the lowest if at
563 # from the list instead, it would have to be the lowest if at
564 # the beginning of the --fold range, or the highest at the end.
564 # the beginning of the --fold range, or the highest at the end.
565 # Otherwise, one or more of the nodes wouldn't be considered in
565 # Otherwise, one or more of the nodes wouldn't be considered in
566 # the diff, and the Differential wouldn't be properly updated.
566 # the diff, and the Differential wouldn't be properly updated.
567 # If this commit is the result of `hg split` in the same
567 # If this commit is the result of `hg split` in the same
568 # scenario, there is a single oldnode here (and multiple
568 # scenario, there is a single oldnode here (and multiple
569 # newnodes mapped to it). That makes it the same as the normal
569 # newnodes mapped to it). That makes it the same as the normal
570 # case, as the edges of the newnode range cleanly maps to one
570 # case, as the edges of the newnode range cleanly maps to one
571 # oldnode each.
571 # oldnode each.
572 if len(oldnodes) == 1:
572 if len(oldnodes) == 1:
573 oldnode = oldnodes[0]
573 oldnode = oldnodes[0]
574 if oldnode and not has_node(oldnode):
574 if oldnode and not has_node(oldnode):
575 oldnode = None
575 oldnode = None
576
576
577 result[newnode] = (oldnode, lastdiff, drev)
577 result[newnode] = (oldnode, lastdiff, drev)
578
578
579 return result
579 return result
580
580
581
581
582 def getdrevmap(repo, revs):
582 def getdrevmap(repo, revs):
583 """Return a dict mapping each rev in `revs` to their Differential Revision
583 """Return a dict mapping each rev in `revs` to their Differential Revision
584 ID or None.
584 ID or None.
585 """
585 """
586 result = {}
586 result = {}
587 for rev in revs:
587 for rev in revs:
588 result[rev] = None
588 result[rev] = None
589 ctx = repo[rev]
589 ctx = repo[rev]
590 # Check commit message
590 # Check commit message
591 m = _differentialrevisiondescre.search(ctx.description())
591 m = _differentialrevisiondescre.search(ctx.description())
592 if m:
592 if m:
593 result[rev] = int(m.group('id'))
593 result[rev] = int(m.group('id'))
594 continue
594 continue
595 # Check tags
595 # Check tags
596 for tag in repo.nodetags(ctx.node()):
596 for tag in repo.nodetags(ctx.node()):
597 m = _differentialrevisiontagre.match(tag)
597 m = _differentialrevisiontagre.match(tag)
598 if m:
598 if m:
599 result[rev] = int(m.group(1))
599 result[rev] = int(m.group(1))
600 break
600 break
601
601
602 return result
602 return result
603
603
604
604
605 def getdiff(basectx, ctx, diffopts):
605 def getdiff(basectx, ctx, diffopts):
606 """plain-text diff without header (user, commit message, etc)"""
606 """plain-text diff without header (user, commit message, etc)"""
607 output = util.stringio()
607 output = util.stringio()
608 for chunk, _label in patch.diffui(
608 for chunk, _label in patch.diffui(
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
610 ):
610 ):
611 output.write(chunk)
611 output.write(chunk)
612 return output.getvalue()
612 return output.getvalue()
613
613
614
614
615 class DiffChangeType(object):
615 class DiffChangeType(object):
616 ADD = 1
616 ADD = 1
617 CHANGE = 2
617 CHANGE = 2
618 DELETE = 3
618 DELETE = 3
619 MOVE_AWAY = 4
619 MOVE_AWAY = 4
620 COPY_AWAY = 5
620 COPY_AWAY = 5
621 MOVE_HERE = 6
621 MOVE_HERE = 6
622 COPY_HERE = 7
622 COPY_HERE = 7
623 MULTICOPY = 8
623 MULTICOPY = 8
624
624
625
625
626 class DiffFileType(object):
626 class DiffFileType(object):
627 TEXT = 1
627 TEXT = 1
628 IMAGE = 2
628 IMAGE = 2
629 BINARY = 3
629 BINARY = 3
630
630
631
631
632 @attr.s
632 @attr.s
633 class phabhunk(dict):
633 class phabhunk(dict):
634 """Represents a Differential hunk, which is owned by a Differential change
634 """Represents a Differential hunk, which is owned by a Differential change
635 """
635 """
636
636
637 oldOffset = attr.ib(default=0) # camelcase-required
637 oldOffset = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
641 corpus = attr.ib(default='')
641 corpus = attr.ib(default='')
642 # These get added to the phabchange's equivalents
642 # These get added to the phabchange's equivalents
643 addLines = attr.ib(default=0) # camelcase-required
643 addLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
645
645
646
646
647 @attr.s
647 @attr.s
648 class phabchange(object):
648 class phabchange(object):
649 """Represents a Differential change, owns Differential hunks and owned by a
649 """Represents a Differential change, owns Differential hunks and owned by a
650 Differential diff. Each one represents one file in a diff.
650 Differential diff. Each one represents one file in a diff.
651 """
651 """
652
652
653 currentPath = attr.ib(default=None) # camelcase-required
653 currentPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
656 metadata = attr.ib(default=attr.Factory(dict))
656 metadata = attr.ib(default=attr.Factory(dict))
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
659 type = attr.ib(default=DiffChangeType.CHANGE)
659 type = attr.ib(default=DiffChangeType.CHANGE)
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
664 hunks = attr.ib(default=attr.Factory(list))
664 hunks = attr.ib(default=attr.Factory(list))
665
665
666 def copynewmetadatatoold(self):
666 def copynewmetadatatoold(self):
667 for key in list(self.metadata.keys()):
667 for key in list(self.metadata.keys()):
668 newkey = key.replace(b'new:', b'old:')
668 newkey = key.replace(b'new:', b'old:')
669 self.metadata[newkey] = self.metadata[key]
669 self.metadata[newkey] = self.metadata[key]
670
670
671 def addoldmode(self, value):
671 def addoldmode(self, value):
672 self.oldProperties[b'unix:filemode'] = value
672 self.oldProperties[b'unix:filemode'] = value
673
673
674 def addnewmode(self, value):
674 def addnewmode(self, value):
675 self.newProperties[b'unix:filemode'] = value
675 self.newProperties[b'unix:filemode'] = value
676
676
677 def addhunk(self, hunk):
677 def addhunk(self, hunk):
678 if not isinstance(hunk, phabhunk):
678 if not isinstance(hunk, phabhunk):
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
681 # It's useful to include these stats since the Phab web UI shows them,
681 # It's useful to include these stats since the Phab web UI shows them,
682 # and uses them to estimate how large a change a Revision is. Also used
682 # and uses them to estimate how large a change a Revision is. Also used
683 # in email subjects for the [+++--] bit.
683 # in email subjects for the [+++--] bit.
684 self.addLines += hunk.addLines
684 self.addLines += hunk.addLines
685 self.delLines += hunk.delLines
685 self.delLines += hunk.delLines
686
686
687
687
688 @attr.s
688 @attr.s
689 class phabdiff(object):
689 class phabdiff(object):
690 """Represents a Differential diff, owns Differential changes. Corresponds
690 """Represents a Differential diff, owns Differential changes. Corresponds
691 to a commit.
691 to a commit.
692 """
692 """
693
693
694 # Doesn't seem to be any reason to send this (output of uname -n)
694 # Doesn't seem to be any reason to send this (output of uname -n)
695 sourceMachine = attr.ib(default=b'') # camelcase-required
695 sourceMachine = attr.ib(default=b'') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
700 branch = attr.ib(default=b'default')
700 branch = attr.ib(default=b'default')
701 bookmark = attr.ib(default=None)
701 bookmark = attr.ib(default=None)
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
705 changes = attr.ib(default=attr.Factory(dict))
705 changes = attr.ib(default=attr.Factory(dict))
706 repositoryPHID = attr.ib(default=None) # camelcase-required
706 repositoryPHID = attr.ib(default=None) # camelcase-required
707
707
708 def addchange(self, change):
708 def addchange(self, change):
709 if not isinstance(change, phabchange):
709 if not isinstance(change, phabchange):
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
711 self.changes[change.currentPath] = pycompat.byteskwargs(
711 self.changes[change.currentPath] = pycompat.byteskwargs(
712 attr.asdict(change)
712 attr.asdict(change)
713 )
713 )
714
714
715
715
716 def maketext(pchange, basectx, ctx, fname):
716 def maketext(pchange, basectx, ctx, fname):
717 """populate the phabchange for a text file"""
717 """populate the phabchange for a text file"""
718 repo = ctx.repo()
718 repo = ctx.repo()
719 fmatcher = match.exact([fname])
719 fmatcher = match.exact([fname])
720 diffopts = mdiff.diffopts(git=True, context=32767)
720 diffopts = mdiff.diffopts(git=True, context=32767)
721 _pfctx, _fctx, header, fhunks = next(
721 _pfctx, _fctx, header, fhunks = next(
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
723 )
723 )
724
724
725 for fhunk in fhunks:
725 for fhunk in fhunks:
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
727 corpus = b''.join(lines[1:])
727 corpus = b''.join(lines[1:])
728 shunk = list(header)
728 shunk = list(header)
729 shunk.extend(lines)
729 shunk.extend(lines)
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
731 patch.diffstatdata(util.iterlines(shunk))
731 patch.diffstatdata(util.iterlines(shunk))
732 )
732 )
733 pchange.addhunk(
733 pchange.addhunk(
734 phabhunk(
734 phabhunk(
735 oldOffset,
735 oldOffset,
736 oldLength,
736 oldLength,
737 newOffset,
737 newOffset,
738 newLength,
738 newLength,
739 corpus,
739 corpus,
740 addLines,
740 addLines,
741 delLines,
741 delLines,
742 )
742 )
743 )
743 )
744
744
745
745
746 def uploadchunks(fctx, fphid):
746 def uploadchunks(fctx, fphid):
747 """upload large binary files as separate chunks.
747 """upload large binary files as separate chunks.
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
749 """
749 """
750 ui = fctx.repo().ui
750 ui = fctx.repo().ui
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
752 with ui.makeprogress(
752 with ui.makeprogress(
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
754 ) as progress:
754 ) as progress:
755 for chunk in chunks:
755 for chunk in chunks:
756 progress.increment()
756 progress.increment()
757 if chunk[b'complete']:
757 if chunk[b'complete']:
758 continue
758 continue
759 bstart = int(chunk[b'byteStart'])
759 bstart = int(chunk[b'byteStart'])
760 bend = int(chunk[b'byteEnd'])
760 bend = int(chunk[b'byteEnd'])
761 callconduit(
761 callconduit(
762 ui,
762 ui,
763 b'file.uploadchunk',
763 b'file.uploadchunk',
764 {
764 {
765 b'filePHID': fphid,
765 b'filePHID': fphid,
766 b'byteStart': bstart,
766 b'byteStart': bstart,
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
768 b'dataEncoding': b'base64',
768 b'dataEncoding': b'base64',
769 },
769 },
770 )
770 )
771
771
772
772
773 def uploadfile(fctx):
773 def uploadfile(fctx):
774 """upload binary files to Phabricator"""
774 """upload binary files to Phabricator"""
775 repo = fctx.repo()
775 repo = fctx.repo()
776 ui = repo.ui
776 ui = repo.ui
777 fname = fctx.path()
777 fname = fctx.path()
778 size = fctx.size()
778 size = fctx.size()
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
780
780
781 # an allocate call is required first to see if an upload is even required
781 # an allocate call is required first to see if an upload is even required
782 # (Phab might already have it) and to determine if chunking is needed
782 # (Phab might already have it) and to determine if chunking is needed
783 allocateparams = {
783 allocateparams = {
784 b'name': fname,
784 b'name': fname,
785 b'contentLength': size,
785 b'contentLength': size,
786 b'contentHash': fhash,
786 b'contentHash': fhash,
787 }
787 }
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
789 fphid = filealloc[b'filePHID']
789 fphid = filealloc[b'filePHID']
790
790
791 if filealloc[b'upload']:
791 if filealloc[b'upload']:
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
793 if not fphid:
793 if not fphid:
794 uploadparams = {
794 uploadparams = {
795 b'name': fname,
795 b'name': fname,
796 b'data_base64': base64.b64encode(fctx.data()),
796 b'data_base64': base64.b64encode(fctx.data()),
797 }
797 }
798 fphid = callconduit(ui, b'file.upload', uploadparams)
798 fphid = callconduit(ui, b'file.upload', uploadparams)
799 else:
799 else:
800 uploadchunks(fctx, fphid)
800 uploadchunks(fctx, fphid)
801 else:
801 else:
802 ui.debug(b'server already has %s\n' % bytes(fctx))
802 ui.debug(b'server already has %s\n' % bytes(fctx))
803
803
804 if not fphid:
804 if not fphid:
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
806
806
807 return fphid
807 return fphid
808
808
809
809
810 def addoldbinary(pchange, oldfctx, fctx):
810 def addoldbinary(pchange, oldfctx, fctx):
811 """add the metadata for the previous version of a binary file to the
811 """add the metadata for the previous version of a binary file to the
812 phabchange for the new version
812 phabchange for the new version
813
813
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
815 version of the file, or None if the file is being removed.
815 version of the file, or None if the file is being removed.
816 """
816 """
817 if not fctx or fctx.cmp(oldfctx):
817 if not fctx or fctx.cmp(oldfctx):
818 # Files differ, add the old one
818 # Files differ, add the old one
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
820 mimeguess, _enc = mimetypes.guess_type(
820 mimeguess, _enc = mimetypes.guess_type(
821 encoding.unifromlocal(oldfctx.path())
821 encoding.unifromlocal(oldfctx.path())
822 )
822 )
823 if mimeguess:
823 if mimeguess:
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
825 mimeguess
825 mimeguess
826 )
826 )
827 fphid = uploadfile(oldfctx)
827 fphid = uploadfile(oldfctx)
828 pchange.metadata[b'old:binary-phid'] = fphid
828 pchange.metadata[b'old:binary-phid'] = fphid
829 else:
829 else:
830 # If it's left as IMAGE/BINARY web UI might try to display it
830 # If it's left as IMAGE/BINARY web UI might try to display it
831 pchange.fileType = DiffFileType.TEXT
831 pchange.fileType = DiffFileType.TEXT
832 pchange.copynewmetadatatoold()
832 pchange.copynewmetadatatoold()
833
833
834
834
835 def makebinary(pchange, fctx):
835 def makebinary(pchange, fctx):
836 """populate the phabchange for a binary file"""
836 """populate the phabchange for a binary file"""
837 pchange.fileType = DiffFileType.BINARY
837 pchange.fileType = DiffFileType.BINARY
838 fphid = uploadfile(fctx)
838 fphid = uploadfile(fctx)
839 pchange.metadata[b'new:binary-phid'] = fphid
839 pchange.metadata[b'new:binary-phid'] = fphid
840 pchange.metadata[b'new:file:size'] = fctx.size()
840 pchange.metadata[b'new:file:size'] = fctx.size()
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
842 if mimeguess:
842 if mimeguess:
843 mimeguess = pycompat.bytestr(mimeguess)
843 mimeguess = pycompat.bytestr(mimeguess)
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
845 if mimeguess.startswith(b'image/'):
845 if mimeguess.startswith(b'image/'):
846 pchange.fileType = DiffFileType.IMAGE
846 pchange.fileType = DiffFileType.IMAGE
847
847
848
848
849 # Copied from mercurial/patch.py
849 # Copied from mercurial/patch.py
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
851
851
852
852
853 def notutf8(fctx):
853 def notutf8(fctx):
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
855 as binary
855 as binary
856 """
856 """
857 try:
857 try:
858 fctx.data().decode('utf-8')
858 fctx.data().decode('utf-8')
859 return False
859 return False
860 except UnicodeDecodeError:
860 except UnicodeDecodeError:
861 fctx.repo().ui.write(
861 fctx.repo().ui.write(
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
863 % fctx.path()
863 % fctx.path()
864 )
864 )
865 return True
865 return True
866
866
867
867
868 def addremoved(pdiff, basectx, ctx, removed):
868 def addremoved(pdiff, basectx, ctx, removed):
869 """add removed files to the phabdiff. Shouldn't include moves"""
869 """add removed files to the phabdiff. Shouldn't include moves"""
870 for fname in removed:
870 for fname in removed:
871 pchange = phabchange(
871 pchange = phabchange(
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
873 )
873 )
874 oldfctx = basectx.p1()[fname]
874 oldfctx = basectx.p1()[fname]
875 pchange.addoldmode(gitmode[oldfctx.flags()])
875 pchange.addoldmode(gitmode[oldfctx.flags()])
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
877 maketext(pchange, basectx, ctx, fname)
877 maketext(pchange, basectx, ctx, fname)
878
878
879 pdiff.addchange(pchange)
879 pdiff.addchange(pchange)
880
880
881
881
882 def addmodified(pdiff, basectx, ctx, modified):
882 def addmodified(pdiff, basectx, ctx, modified):
883 """add modified files to the phabdiff"""
883 """add modified files to the phabdiff"""
884 for fname in modified:
884 for fname in modified:
885 fctx = ctx[fname]
885 fctx = ctx[fname]
886 oldfctx = basectx.p1()[fname]
886 oldfctx = basectx.p1()[fname]
887 pchange = phabchange(currentPath=fname, oldPath=fname)
887 pchange = phabchange(currentPath=fname, oldPath=fname)
888 filemode = gitmode[fctx.flags()]
888 filemode = gitmode[fctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
890 if filemode != originalmode:
890 if filemode != originalmode:
891 pchange.addoldmode(originalmode)
891 pchange.addoldmode(originalmode)
892 pchange.addnewmode(filemode)
892 pchange.addnewmode(filemode)
893
893
894 if (
894 if (
895 fctx.isbinary()
895 fctx.isbinary()
896 or notutf8(fctx)
896 or notutf8(fctx)
897 or oldfctx.isbinary()
897 or oldfctx.isbinary()
898 or notutf8(oldfctx)
898 or notutf8(oldfctx)
899 ):
899 ):
900 makebinary(pchange, fctx)
900 makebinary(pchange, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
902 else:
902 else:
903 maketext(pchange, basectx, ctx, fname)
903 maketext(pchange, basectx, ctx, fname)
904
904
905 pdiff.addchange(pchange)
905 pdiff.addchange(pchange)
906
906
907
907
908 def addadded(pdiff, basectx, ctx, added, removed):
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    ``added`` and ``removed`` are file-name lists from the status between
    ``basectx.p1()`` and ``ctx``.  NOTE: moved files are removed from
    ``removed`` in place here, so a subsequent addremoved() call by the
    caller will not see them.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        # folded range: collect copies across the whole span in one pass
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a second destination for an already-moved source becomes a
                # multicopy on the Phabricator side
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # flush the accumulated source-side changes after all destinations are known
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
989
989
990
990
def creatediff(basectx, ctx):
    """create a Differential Diff

    ``basectx`` is the first commit of the range being posted and ``ctx`` the
    last (they are the same object for a single-commit review).  Returns the
    conduit response dict for the new diff; raises ``error.Abort`` when the
    conduit call yields no result.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
1020
1020
1021
1021
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.

    Two ``differential.setdiffproperty`` calls are made: one for the
    ``hg:meta`` property (tip commit metadata plus the base parent) and one
    for ``local:commits`` (per-commit metadata for every context in the
    range).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1065
1065
1066
1066
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair of conduit response dicts; raises
    ``error.Abort`` when the revision edit yields no result.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # huge context so the comparison sees the full patch content
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1185
1185
1186
1186
def userphids(ui, names):
    """convert user names to PHIDs

    ``names`` are matched case-insensitively via the ``user.search`` conduit
    endpoint.  Returns the list of PHIDs for every resolved user; raises
    ``error.Abort`` listing any name that did not resolve.
    """
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
1202
1202
1203
1203
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.

    ``action`` must be one of ``b'created'``, ``b'skipped'`` or ``b'updated'``
    (a ``KeyError`` escapes otherwise).
    """
    actiondesc = ui.label(
        {
            b'created': _(b'created'),
            b'skipped': _(b'skipped'),
            b'updated': _(b'updated'),
        }[action],
        b'phabricator.action.%s' % action,
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    # first line of the commit message only
    desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1222
1222
1223
1223
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    _debug(
        unfi.ui,
        b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
    )

    try:
        writediffproperties([unfi[newnode] for newnode in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1241
1241
1242
1242
1243 @vcrcommand(
1243 @vcrcommand(
1244 b'phabsend',
1244 b'phabsend',
1245 [
1245 [
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1250 (
1250 (
1251 b'm',
1251 b'm',
1252 b'comment',
1252 b'comment',
1253 b'',
1253 b'',
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1255 ),
1255 ),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1258 ],
1258 ],
1259 _(b'REV [OPTIONS]'),
1259 _(b'REV [OPTIONS]'),
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1261 )
1261 )
1262 def phabsend(ui, repo, *revs, **opts):
1262 def phabsend(ui, repo, *revs, **opts):
1263 """upload changesets to Phabricator
1263 """upload changesets to Phabricator
1264
1264
1265 If there are multiple revisions specified, they will be send as a stack
1265 If there are multiple revisions specified, they will be send as a stack
1266 with a linear dependencies relationship using the order specified by the
1266 with a linear dependencies relationship using the order specified by the
1267 revset.
1267 revset.
1268
1268
1269 For the first time uploading changesets, local tags will be created to
1269 For the first time uploading changesets, local tags will be created to
1270 maintain the association. After the first time, phabsend will check
1270 maintain the association. After the first time, phabsend will check
1271 obsstore and tags information so it can figure out whether to update an
1271 obsstore and tags information so it can figure out whether to update an
1272 existing Differential Revision, or create a new one.
1272 existing Differential Revision, or create a new one.
1273
1273
1274 If --amend is set, update commit messages so they have the
1274 If --amend is set, update commit messages so they have the
1275 ``Differential Revision`` URL, remove related tags. This is similar to what
1275 ``Differential Revision`` URL, remove related tags. This is similar to what
1276 arcanist will do, and is more desired in author-push workflows. Otherwise,
1276 arcanist will do, and is more desired in author-push workflows. Otherwise,
1277 use local tags to record the ``Differential Revision`` association.
1277 use local tags to record the ``Differential Revision`` association.
1278
1278
1279 The --confirm option lets you confirm changesets before sending them. You
1279 The --confirm option lets you confirm changesets before sending them. You
1280 can also add following to your configuration file to make it default
1280 can also add following to your configuration file to make it default
1281 behaviour::
1281 behaviour::
1282
1282
1283 [phabsend]
1283 [phabsend]
1284 confirm = true
1284 confirm = true
1285
1285
1286 By default, a separate review will be created for each commit that is
1286 By default, a separate review will be created for each commit that is
1287 selected, and will have the same parent/child relationship in Phabricator.
1287 selected, and will have the same parent/child relationship in Phabricator.
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1289 as if diffed from the parent of the first revision to the last. The commit
1289 as if diffed from the parent of the first revision to the last. The commit
1290 messages are concatenated in the summary field on Phabricator.
1290 messages are concatenated in the summary field on Phabricator.
1291
1291
1292 phabsend will check obsstore and the above association to decide whether to
1292 phabsend will check obsstore and the above association to decide whether to
1293 update an existing Differential Revision, or create a new one.
1293 update an existing Differential Revision, or create a new one.
1294 """
1294 """
1295 opts = pycompat.byteskwargs(opts)
1295 opts = pycompat.byteskwargs(opts)
1296 revs = list(revs) + opts.get(b'rev', [])
1296 revs = list(revs) + opts.get(b'rev', [])
1297 revs = scmutil.revrange(repo, revs)
1297 revs = scmutil.revrange(repo, revs)
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1299
1299
1300 if not revs:
1300 if not revs:
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1302 if opts.get(b'amend'):
1302 if opts.get(b'amend'):
1303 cmdutil.checkunfinished(repo)
1303 cmdutil.checkunfinished(repo)
1304
1304
1305 ctxs = [repo[rev] for rev in revs]
1305 ctxs = [repo[rev] for rev in revs]
1306
1306
1307 if any(c for c in ctxs if c.obsolete()):
1307 if any(c for c in ctxs if c.obsolete()):
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1309
1309
1310 fold = opts.get(b'fold')
1310 fold = opts.get(b'fold')
1311 if fold:
1311 if fold:
1312 if len(revs) == 1:
1312 if len(revs) == 1:
1313 # TODO: just switch to --no-fold instead?
1313 # TODO: just switch to --no-fold instead?
1314 raise error.Abort(_(b"cannot fold a single revision"))
1314 raise error.Abort(_(b"cannot fold a single revision"))
1315
1315
1316 # There's no clear way to manage multiple commits with a Dxxx tag, so
1316 # There's no clear way to manage multiple commits with a Dxxx tag, so
1317 # require the amend option. (We could append "_nnn", but then it
1317 # require the amend option. (We could append "_nnn", but then it
1318 # becomes jumbled if earlier commits are added to an update.) It should
1318 # becomes jumbled if earlier commits are added to an update.) It should
1319 # lock the repo and ensure that the range is editable, but that would
1319 # lock the repo and ensure that the range is editable, but that would
1320 # make the code pretty convoluted. The default behavior of `arc` is to
1320 # make the code pretty convoluted. The default behavior of `arc` is to
1321 # create a new review anyway.
1321 # create a new review anyway.
1322 if not opts.get(b"amend"):
1322 if not opts.get(b"amend"):
1323 raise error.Abort(_(b"cannot fold with --no-amend"))
1323 raise error.Abort(_(b"cannot fold with --no-amend"))
1324
1324
1325 # Ensure the local commits are an unbroken range
1325 # Ensure the local commits are an unbroken range
1326 revrange = repo.revs(b'(first(%ld)::last(%ld))', revs, revs)
1326 revrange = repo.revs(b'(first(%ld)::last(%ld))', revs, revs)
1327 if any(r for r in revs if r not in revrange) or any(
1327 if any(r for r in revs if r not in revrange) or any(
1328 r for r in revrange if r not in revs
1328 r for r in revrange if r not in revs
1329 ):
1329 ):
1330 raise error.Abort(_(b"cannot fold non-linear revisions"))
1330 raise error.Abort(_(b"cannot fold non-linear revisions"))
1331
1331
1332 # It might be possible to bucketize the revisions by the DREV value, and
1332 # It might be possible to bucketize the revisions by the DREV value, and
1333 # iterate over those groups when posting, and then again when amending.
1333 # iterate over those groups when posting, and then again when amending.
1334 # But for simplicity, require all selected revisions to be for the same
1334 # But for simplicity, require all selected revisions to be for the same
1335 # DREV (if present). Adding local revisions to an existing DREV is
1335 # DREV (if present). Adding local revisions to an existing DREV is
1336 # acceptable.
1336 # acceptable.
1337 drevmatchers = [
1337 drevmatchers = [
1338 _differentialrevisiondescre.search(ctx.description())
1338 _differentialrevisiondescre.search(ctx.description())
1339 for ctx in ctxs
1339 for ctx in ctxs
1340 ]
1340 ]
1341 if len({m.group('url') for m in drevmatchers if m}) > 1:
1341 if len({m.group('url') for m in drevmatchers if m}) > 1:
1342 raise error.Abort(
1342 raise error.Abort(
1343 _(b"cannot fold revisions with different DREV values")
1343 _(b"cannot fold revisions with different DREV values")
1344 )
1344 )
1345
1345
1346 # {newnode: (oldnode, olddiff, olddrev}
1346 # {newnode: (oldnode, olddiff, olddrev}
1347 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1347 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1348
1348
1349 confirm = ui.configbool(b'phabsend', b'confirm')
1349 confirm = ui.configbool(b'phabsend', b'confirm')
1350 confirm |= bool(opts.get(b'confirm'))
1350 confirm |= bool(opts.get(b'confirm'))
1351 if confirm:
1351 if confirm:
1352 confirmed = _confirmbeforesend(repo, revs, oldmap)
1352 confirmed = _confirmbeforesend(repo, revs, oldmap)
1353 if not confirmed:
1353 if not confirmed:
1354 raise error.Abort(_(b'phabsend cancelled'))
1354 raise error.Abort(_(b'phabsend cancelled'))
1355
1355
1356 actions = []
1356 actions = []
1357 reviewers = opts.get(b'reviewer', [])
1357 reviewers = opts.get(b'reviewer', [])
1358 blockers = opts.get(b'blocker', [])
1358 blockers = opts.get(b'blocker', [])
1359 phids = []
1359 phids = []
1360 if reviewers:
1360 if reviewers:
1361 phids.extend(userphids(repo.ui, reviewers))
1361 phids.extend(userphids(repo.ui, reviewers))
1362 if blockers:
1362 if blockers:
1363 phids.extend(
1363 phids.extend(
1364 map(
1364 map(
1365 lambda phid: b'blocking(%s)' % phid,
1365 lambda phid: b'blocking(%s)' % phid,
1366 userphids(repo.ui, blockers),
1366 userphids(repo.ui, blockers),
1367 )
1367 )
1368 )
1368 )
1369 if phids:
1369 if phids:
1370 actions.append({b'type': b'reviewers.add', b'value': phids})
1370 actions.append({b'type': b'reviewers.add', b'value': phids})
1371
1371
1372 drevids = [] # [int]
1372 drevids = [] # [int]
1373 diffmap = {} # {newnode: diff}
1373 diffmap = {} # {newnode: diff}
1374
1374
1375 # Send patches one by one so we know their Differential Revision PHIDs and
1375 # Send patches one by one so we know their Differential Revision PHIDs and
1376 # can provide dependency relationship
1376 # can provide dependency relationship
1377 lastrevphid = None
1377 lastrevphid = None
1378 for ctx in ctxs:
1378 for ctx in ctxs:
1379 if fold:
1379 if fold:
1380 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1380 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1381 else:
1381 else:
1382 ui.debug(b'sending rev %d\n' % ctx.rev())
1382 ui.debug(b'sending rev %d\n' % ctx.rev())
1383
1383
1384 # Get Differential Revision ID
1384 # Get Differential Revision ID
1385 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1385 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1386 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1386 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1387
1387
1388 if fold:
1388 if fold:
1389 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1389 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1390 ctxs[-1].node(), (None, None, None)
1390 ctxs[-1].node(), (None, None, None)
1391 )
1391 )
1392
1392
1393 if oldnode != ctx.node() or opts.get(b'amend'):
1393 if oldnode != ctx.node() or opts.get(b'amend'):
1394 # Create or update Differential Revision
1394 # Create or update Differential Revision
1395 revision, diff = createdifferentialrevision(
1395 revision, diff = createdifferentialrevision(
1396 ctxs if fold else [ctx],
1396 ctxs if fold else [ctx],
1397 revid,
1397 revid,
1398 lastrevphid,
1398 lastrevphid,
1399 oldbasenode,
1399 oldbasenode,
1400 oldnode,
1400 oldnode,
1401 olddiff,
1401 olddiff,
1402 actions,
1402 actions,
1403 opts.get(b'comment'),
1403 opts.get(b'comment'),
1404 )
1404 )
1405
1405
1406 if fold:
1406 if fold:
1407 for ctx in ctxs:
1407 for ctx in ctxs:
1408 diffmap[ctx.node()] = diff
1408 diffmap[ctx.node()] = diff
1409 else:
1409 else:
1410 diffmap[ctx.node()] = diff
1410 diffmap[ctx.node()] = diff
1411
1411
1412 newrevid = int(revision[b'object'][b'id'])
1412 newrevid = int(revision[b'object'][b'id'])
1413 newrevphid = revision[b'object'][b'phid']
1413 newrevphid = revision[b'object'][b'phid']
1414 if revid:
1414 if revid:
1415 action = b'updated'
1415 action = b'updated'
1416 else:
1416 else:
1417 action = b'created'
1417 action = b'created'
1418
1418
1419 # Create a local tag to note the association, if commit message
1419 # Create a local tag to note the association, if commit message
1420 # does not have it already
1420 # does not have it already
1421 if not fold:
1421 if not fold:
1422 m = _differentialrevisiondescre.search(ctx.description())
1422 m = _differentialrevisiondescre.search(ctx.description())
1423 if not m or int(m.group('id')) != newrevid:
1423 if not m or int(m.group('id')) != newrevid:
1424 tagname = b'D%d' % newrevid
1424 tagname = b'D%d' % newrevid
1425 tags.tag(
1425 tags.tag(
1426 repo,
1426 repo,
1427 tagname,
1427 tagname,
1428 ctx.node(),
1428 ctx.node(),
1429 message=None,
1429 message=None,
1430 user=None,
1430 user=None,
1431 date=None,
1431 date=None,
1432 local=True,
1432 local=True,
1433 )
1433 )
1434 else:
1434 else:
1435 # Nothing changed. But still set "newrevphid" so the next revision
1435 # Nothing changed. But still set "newrevphid" so the next revision
1436 # could depend on this one and "newrevid" for the summary line.
1436 # could depend on this one and "newrevid" for the summary line.
1437 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1437 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1438 newrevid = revid
1438 newrevid = revid
1439 action = b'skipped'
1439 action = b'skipped'
1440
1440
1441 drevids.append(newrevid)
1441 drevids.append(newrevid)
1442 lastrevphid = newrevphid
1442 lastrevphid = newrevphid
1443
1443
1444 if fold:
1444 if fold:
1445 for c in ctxs:
1445 for c in ctxs:
1446 if oldmap.get(c.node(), (None, None, None))[2]:
1446 if oldmap.get(c.node(), (None, None, None))[2]:
1447 action = b'updated'
1447 action = b'updated'
1448 else:
1448 else:
1449 action = b'created'
1449 action = b'created'
1450 _print_phabsend_action(ui, c, newrevid, action)
1450 _print_phabsend_action(ui, c, newrevid, action)
1451 break
1451 break
1452
1452
1453 _print_phabsend_action(ui, ctx, newrevid, action)
1453 _print_phabsend_action(ui, ctx, newrevid, action)
1454
1454
1455 # Update commit messages and remove tags
1455 # Update commit messages and remove tags
1456 if opts.get(b'amend'):
1456 if opts.get(b'amend'):
1457 unfi = repo.unfiltered()
1457 unfi = repo.unfiltered()
1458 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1458 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1459 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1459 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1460 # Eagerly evaluate commits to restabilize before creating new
1461 # commits. The selected revisions are excluded because they are
1462 # automatically restacked as part of the submission process.
1463 restack = [
1464 c
1465 for c in repo.set(
1466 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1467 revs,
1468 revs,
1469 )
1470 ]
1460 wnode = unfi[b'.'].node()
1471 wnode = unfi[b'.'].node()
1461 mapping = {} # {oldnode: [newnode]}
1472 mapping = {} # {oldnode: [newnode]}
1462 newnodes = []
1473 newnodes = []
1463
1474
1464 drevid = drevids[0]
1475 drevid = drevids[0]
1465
1476
1466 for i, rev in enumerate(revs):
1477 for i, rev in enumerate(revs):
1467 old = unfi[rev]
1478 old = unfi[rev]
1468 if not fold:
1479 if not fold:
1469 drevid = drevids[i]
1480 drevid = drevids[i]
1470 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1481 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1471
1482
1472 newdesc = get_amended_desc(drev, old, fold)
1483 newdesc = get_amended_desc(drev, old, fold)
1473 # Make sure commit message contain "Differential Revision"
1484 # Make sure commit message contain "Differential Revision"
1474 if (
1485 if (
1475 old.description() != newdesc
1486 old.description() != newdesc
1476 or old.p1().node() in mapping
1487 or old.p1().node() in mapping
1477 or old.p2().node() in mapping
1488 or old.p2().node() in mapping
1478 ):
1489 ):
1479 if old.phase() == phases.public:
1490 if old.phase() == phases.public:
1480 ui.warn(
1491 ui.warn(
1481 _(b"warning: not updating public commit %s\n")
1492 _(b"warning: not updating public commit %s\n")
1482 % scmutil.formatchangeid(old)
1493 % scmutil.formatchangeid(old)
1483 )
1494 )
1484 continue
1495 continue
1485 parents = [
1496 parents = [
1486 mapping.get(old.p1().node(), (old.p1(),))[0],
1497 mapping.get(old.p1().node(), (old.p1(),))[0],
1487 mapping.get(old.p2().node(), (old.p2(),))[0],
1498 mapping.get(old.p2().node(), (old.p2(),))[0],
1488 ]
1499 ]
1489 new = context.metadataonlyctx(
1500 new = context.metadataonlyctx(
1490 repo,
1501 repo,
1491 old,
1502 old,
1492 parents=parents,
1503 parents=parents,
1493 text=newdesc,
1504 text=newdesc,
1494 user=old.user(),
1505 user=old.user(),
1495 date=old.date(),
1506 date=old.date(),
1496 extra=old.extra(),
1507 extra=old.extra(),
1497 )
1508 )
1498
1509
1499 newnode = new.commit()
1510 newnode = new.commit()
1500
1511
1501 mapping[old.node()] = [newnode]
1512 mapping[old.node()] = [newnode]
1502
1513
1503 if fold:
1514 if fold:
1504 # Defer updating the (single) Diff until all nodes are
1515 # Defer updating the (single) Diff until all nodes are
1505 # collected. No tags were created, so none need to be
1516 # collected. No tags were created, so none need to be
1506 # removed.
1517 # removed.
1507 newnodes.append(newnode)
1518 newnodes.append(newnode)
1508 continue
1519 continue
1509
1520
1510 _amend_diff_properties(
1521 _amend_diff_properties(
1511 unfi, drevid, [newnode], diffmap[old.node()]
1522 unfi, drevid, [newnode], diffmap[old.node()]
1512 )
1523 )
1513
1524
1514 # Remove local tags since it's no longer necessary
1525 # Remove local tags since it's no longer necessary
1515 tagname = b'D%d' % drevid
1526 tagname = b'D%d' % drevid
1516 if tagname in repo.tags():
1527 if tagname in repo.tags():
1517 tags.tag(
1528 tags.tag(
1518 repo,
1529 repo,
1519 tagname,
1530 tagname,
1520 nullid,
1531 nullid,
1521 message=None,
1532 message=None,
1522 user=None,
1533 user=None,
1523 date=None,
1534 date=None,
1524 local=True,
1535 local=True,
1525 )
1536 )
1526 elif fold:
1537 elif fold:
1527 # When folding multiple commits into one review with
1538 # When folding multiple commits into one review with
1528 # --fold, track even the commits that weren't amended, so
1539 # --fold, track even the commits that weren't amended, so
1529 # that their association isn't lost if the properties are
1540 # that their association isn't lost if the properties are
1530 # rewritten below.
1541 # rewritten below.
1531 newnodes.append(old.node())
1542 newnodes.append(old.node())
1532
1543
1533 # If the submitted commits are public, no amend takes place so
1544 # If the submitted commits are public, no amend takes place so
1534 # there are no newnodes and therefore no diff update to do.
1545 # there are no newnodes and therefore no diff update to do.
1535 if fold and newnodes:
1546 if fold and newnodes:
1536 diff = diffmap[old.node()]
1547 diff = diffmap[old.node()]
1537
1548
1538 # The diff object in diffmap doesn't have the local commits
1549 # The diff object in diffmap doesn't have the local commits
1539 # because that could be returned from differential.creatediff,
1550 # because that could be returned from differential.creatediff,
1540 # not differential.querydiffs. So use the queried diff (if
1551 # not differential.querydiffs. So use the queried diff (if
1541 # present), or force the amend (a new revision is being posted.)
1552 # present), or force the amend (a new revision is being posted.)
1542 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1553 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1543 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1554 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1544 _amend_diff_properties(unfi, drevid, newnodes, diff)
1555 _amend_diff_properties(unfi, drevid, newnodes, diff)
1545 else:
1556 else:
1546 _debug(
1557 _debug(
1547 ui,
1558 ui,
1548 b"local commit list for D%d is already up-to-date\n"
1559 b"local commit list for D%d is already up-to-date\n"
1549 % drevid,
1560 % drevid,
1550 )
1561 )
1551 elif fold:
1562 elif fold:
1552 _debug(ui, b"no newnodes to update\n")
1563 _debug(ui, b"no newnodes to update\n")
1553
1564
1565 # Restack any children of first-time submissions that were orphaned
1566 # in the process. The ctx won't report that it is an orphan until
1567 # the cleanup takes place below.
1568 for old in restack:
1569 parents = [
1570 mapping.get(old.p1().node(), (old.p1(),))[0],
1571 mapping.get(old.p2().node(), (old.p2(),))[0],
1572 ]
1573 new = context.metadataonlyctx(
1574 repo,
1575 old,
1576 parents=parents,
1577 text=old.description(),
1578 user=old.user(),
1579 date=old.date(),
1580 extra=old.extra(),
1581 )
1582
1583 newnode = new.commit()
1584
1585 # Don't obsolete unselected descendants of nodes that have not
1586 # been changed in this transaction- that results in an error.
1587 if newnode != old.node():
1588 mapping[old.node()] = [newnode]
1589 _debug(
1590 ui,
1591 b"restabilizing %s as %s\n"
1592 % (short(old.node()), short(newnode)),
1593 )
1594 else:
1595 _debug(
1596 ui,
1597 b"not restabilizing unchanged %s\n" % short(old.node()),
1598 )
1599
1554 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1600 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1555 if wnode in mapping:
1601 if wnode in mapping:
1556 unfi.setparents(mapping[wnode][0])
1602 unfi.setparents(mapping[wnode][0])
1557
1603
1558
1604
1559 # Map from "hg:meta" keys to header understood by "hg import". The order is
1605 # Map from "hg:meta" keys to header understood by "hg import". The order is
1560 # consistent with "hg export" output.
1606 # consistent with "hg export" output.
1561 _metanamemap = util.sortdict(
1607 _metanamemap = util.sortdict(
1562 [
1608 [
1563 (b'user', b'User'),
1609 (b'user', b'User'),
1564 (b'date', b'Date'),
1610 (b'date', b'Date'),
1565 (b'branch', b'Branch'),
1611 (b'branch', b'Branch'),
1566 (b'node', b'Node ID'),
1612 (b'node', b'Node ID'),
1567 (b'parent', b'Parent '),
1613 (b'parent', b'Parent '),
1568 ]
1614 ]
1569 )
1615 )
1570
1616
1571
1617
1572 def _confirmbeforesend(repo, revs, oldmap):
1618 def _confirmbeforesend(repo, revs, oldmap):
1573 url, token = readurltoken(repo.ui)
1619 url, token = readurltoken(repo.ui)
1574 ui = repo.ui
1620 ui = repo.ui
1575 for rev in revs:
1621 for rev in revs:
1576 ctx = repo[rev]
1622 ctx = repo[rev]
1577 desc = ctx.description().splitlines()[0]
1623 desc = ctx.description().splitlines()[0]
1578 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1624 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1579 if drevid:
1625 if drevid:
1580 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1626 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1581 else:
1627 else:
1582 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1628 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1583
1629
1584 ui.write(
1630 ui.write(
1585 _(b'%s - %s: %s\n')
1631 _(b'%s - %s: %s\n')
1586 % (
1632 % (
1587 drevdesc,
1633 drevdesc,
1588 ui.label(bytes(ctx), b'phabricator.node'),
1634 ui.label(bytes(ctx), b'phabricator.node'),
1589 ui.label(desc, b'phabricator.desc'),
1635 ui.label(desc, b'phabricator.desc'),
1590 )
1636 )
1591 )
1637 )
1592
1638
1593 if ui.promptchoice(
1639 if ui.promptchoice(
1594 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1640 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1595 ):
1641 ):
1596 return False
1642 return False
1597
1643
1598 return True
1644 return True
1599
1645
1600
1646
1601 _knownstatusnames = {
1647 _knownstatusnames = {
1602 b'accepted',
1648 b'accepted',
1603 b'needsreview',
1649 b'needsreview',
1604 b'needsrevision',
1650 b'needsrevision',
1605 b'closed',
1651 b'closed',
1606 b'abandoned',
1652 b'abandoned',
1607 b'changesplanned',
1653 b'changesplanned',
1608 }
1654 }
1609
1655
1610
1656
1611 def _getstatusname(drev):
1657 def _getstatusname(drev):
1612 """get normalized status name from a Differential Revision"""
1658 """get normalized status name from a Differential Revision"""
1613 return drev[b'statusName'].replace(b' ', b'').lower()
1659 return drev[b'statusName'].replace(b' ', b'').lower()
1614
1660
1615
1661
1616 # Small language to specify differential revisions. Support symbols: (), :X,
1662 # Small language to specify differential revisions. Support symbols: (), :X,
1617 # +, and -.
1663 # +, and -.
1618
1664
1619 _elements = {
1665 _elements = {
1620 # token-type: binding-strength, primary, prefix, infix, suffix
1666 # token-type: binding-strength, primary, prefix, infix, suffix
1621 b'(': (12, None, (b'group', 1, b')'), None, None),
1667 b'(': (12, None, (b'group', 1, b')'), None, None),
1622 b':': (8, None, (b'ancestors', 8), None, None),
1668 b':': (8, None, (b'ancestors', 8), None, None),
1623 b'&': (5, None, None, (b'and_', 5), None),
1669 b'&': (5, None, None, (b'and_', 5), None),
1624 b'+': (4, None, None, (b'add', 4), None),
1670 b'+': (4, None, None, (b'add', 4), None),
1625 b'-': (4, None, None, (b'sub', 4), None),
1671 b'-': (4, None, None, (b'sub', 4), None),
1626 b')': (0, None, None, None, None),
1672 b')': (0, None, None, None, None),
1627 b'symbol': (0, b'symbol', None, None, None),
1673 b'symbol': (0, b'symbol', None, None, None),
1628 b'end': (0, None, None, None, None),
1674 b'end': (0, None, None, None, None),
1629 }
1675 }
1630
1676
1631
1677
1632 def _tokenize(text):
1678 def _tokenize(text):
1633 view = memoryview(text) # zero-copy slice
1679 view = memoryview(text) # zero-copy slice
1634 special = b'():+-& '
1680 special = b'():+-& '
1635 pos = 0
1681 pos = 0
1636 length = len(text)
1682 length = len(text)
1637 while pos < length:
1683 while pos < length:
1638 symbol = b''.join(
1684 symbol = b''.join(
1639 itertools.takewhile(
1685 itertools.takewhile(
1640 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1686 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1641 )
1687 )
1642 )
1688 )
1643 if symbol:
1689 if symbol:
1644 yield (b'symbol', symbol, pos)
1690 yield (b'symbol', symbol, pos)
1645 pos += len(symbol)
1691 pos += len(symbol)
1646 else: # special char, ignore space
1692 else: # special char, ignore space
1647 if text[pos : pos + 1] != b' ':
1693 if text[pos : pos + 1] != b' ':
1648 yield (text[pos : pos + 1], None, pos)
1694 yield (text[pos : pos + 1], None, pos)
1649 pos += 1
1695 pos += 1
1650 yield (b'end', None, pos)
1696 yield (b'end', None, pos)
1651
1697
1652
1698
1653 def _parse(text):
1699 def _parse(text):
1654 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1700 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1655 if pos != len(text):
1701 if pos != len(text):
1656 raise error.ParseError(b'invalid token', pos)
1702 raise error.ParseError(b'invalid token', pos)
1657 return tree
1703 return tree
1658
1704
1659
1705
1660 def _parsedrev(symbol):
1706 def _parsedrev(symbol):
1661 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1707 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1662 if symbol.startswith(b'D') and symbol[1:].isdigit():
1708 if symbol.startswith(b'D') and symbol[1:].isdigit():
1663 return int(symbol[1:])
1709 return int(symbol[1:])
1664 if symbol.isdigit():
1710 if symbol.isdigit():
1665 return int(symbol)
1711 return int(symbol)
1666
1712
1667
1713
1668 def _prefetchdrevs(tree):
1714 def _prefetchdrevs(tree):
1669 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1715 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1670 drevs = set()
1716 drevs = set()
1671 ancestordrevs = set()
1717 ancestordrevs = set()
1672 op = tree[0]
1718 op = tree[0]
1673 if op == b'symbol':
1719 if op == b'symbol':
1674 r = _parsedrev(tree[1])
1720 r = _parsedrev(tree[1])
1675 if r:
1721 if r:
1676 drevs.add(r)
1722 drevs.add(r)
1677 elif op == b'ancestors':
1723 elif op == b'ancestors':
1678 r, a = _prefetchdrevs(tree[1])
1724 r, a = _prefetchdrevs(tree[1])
1679 drevs.update(r)
1725 drevs.update(r)
1680 ancestordrevs.update(r)
1726 ancestordrevs.update(r)
1681 ancestordrevs.update(a)
1727 ancestordrevs.update(a)
1682 else:
1728 else:
1683 for t in tree[1:]:
1729 for t in tree[1:]:
1684 r, a = _prefetchdrevs(t)
1730 r, a = _prefetchdrevs(t)
1685 drevs.update(r)
1731 drevs.update(r)
1686 ancestordrevs.update(a)
1732 ancestordrevs.update(a)
1687 return drevs, ancestordrevs
1733 return drevs, ancestordrevs
1688
1734
1689
1735
1690 def querydrev(ui, spec):
1736 def querydrev(ui, spec):
1691 """return a list of "Differential Revision" dicts
1737 """return a list of "Differential Revision" dicts
1692
1738
1693 spec is a string using a simple query language, see docstring in phabread
1739 spec is a string using a simple query language, see docstring in phabread
1694 for details.
1740 for details.
1695
1741
1696 A "Differential Revision dict" looks like:
1742 A "Differential Revision dict" looks like:
1697
1743
1698 {
1744 {
1699 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1745 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1700 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1746 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1701 "auxiliary": {
1747 "auxiliary": {
1702 "phabricator:depends-on": [
1748 "phabricator:depends-on": [
1703 "PHID-DREV-gbapp366kutjebt7agcd"
1749 "PHID-DREV-gbapp366kutjebt7agcd"
1704 ]
1750 ]
1705 "phabricator:projects": [],
1751 "phabricator:projects": [],
1706 },
1752 },
1707 "branch": "default",
1753 "branch": "default",
1708 "ccs": [],
1754 "ccs": [],
1709 "commits": [],
1755 "commits": [],
1710 "dateCreated": "1499181406",
1756 "dateCreated": "1499181406",
1711 "dateModified": "1499182103",
1757 "dateModified": "1499182103",
1712 "diffs": [
1758 "diffs": [
1713 "3",
1759 "3",
1714 "4",
1760 "4",
1715 ],
1761 ],
1716 "hashes": [],
1762 "hashes": [],
1717 "id": "2",
1763 "id": "2",
1718 "lineCount": "2",
1764 "lineCount": "2",
1719 "phid": "PHID-DREV-672qvysjcczopag46qty",
1765 "phid": "PHID-DREV-672qvysjcczopag46qty",
1720 "properties": {},
1766 "properties": {},
1721 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1767 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1722 "reviewers": [],
1768 "reviewers": [],
1723 "sourcePath": null
1769 "sourcePath": null
1724 "status": "0",
1770 "status": "0",
1725 "statusName": "Needs Review",
1771 "statusName": "Needs Review",
1726 "summary": "",
1772 "summary": "",
1727 "testPlan": "",
1773 "testPlan": "",
1728 "title": "example",
1774 "title": "example",
1729 "uri": "https://phab.example.com/D2",
1775 "uri": "https://phab.example.com/D2",
1730 }
1776 }
1731 """
1777 """
1732 # TODO: replace differential.query and differential.querydiffs with
1778 # TODO: replace differential.query and differential.querydiffs with
1733 # differential.diff.search because the former (and their output) are
1779 # differential.diff.search because the former (and their output) are
1734 # frozen, and planned to be deprecated and removed.
1780 # frozen, and planned to be deprecated and removed.
1735
1781
1736 def fetch(params):
1782 def fetch(params):
1737 """params -> single drev or None"""
1783 """params -> single drev or None"""
1738 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1784 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1739 if key in prefetched:
1785 if key in prefetched:
1740 return prefetched[key]
1786 return prefetched[key]
1741 drevs = callconduit(ui, b'differential.query', params)
1787 drevs = callconduit(ui, b'differential.query', params)
1742 # Fill prefetched with the result
1788 # Fill prefetched with the result
1743 for drev in drevs:
1789 for drev in drevs:
1744 prefetched[drev[b'phid']] = drev
1790 prefetched[drev[b'phid']] = drev
1745 prefetched[int(drev[b'id'])] = drev
1791 prefetched[int(drev[b'id'])] = drev
1746 if key not in prefetched:
1792 if key not in prefetched:
1747 raise error.Abort(
1793 raise error.Abort(
1748 _(b'cannot get Differential Revision %r') % params
1794 _(b'cannot get Differential Revision %r') % params
1749 )
1795 )
1750 return prefetched[key]
1796 return prefetched[key]
1751
1797
1752 def getstack(topdrevids):
1798 def getstack(topdrevids):
1753 """given a top, get a stack from the bottom, [id] -> [id]"""
1799 """given a top, get a stack from the bottom, [id] -> [id]"""
1754 visited = set()
1800 visited = set()
1755 result = []
1801 result = []
1756 queue = [{b'ids': [i]} for i in topdrevids]
1802 queue = [{b'ids': [i]} for i in topdrevids]
1757 while queue:
1803 while queue:
1758 params = queue.pop()
1804 params = queue.pop()
1759 drev = fetch(params)
1805 drev = fetch(params)
1760 if drev[b'id'] in visited:
1806 if drev[b'id'] in visited:
1761 continue
1807 continue
1762 visited.add(drev[b'id'])
1808 visited.add(drev[b'id'])
1763 result.append(int(drev[b'id']))
1809 result.append(int(drev[b'id']))
1764 auxiliary = drev.get(b'auxiliary', {})
1810 auxiliary = drev.get(b'auxiliary', {})
1765 depends = auxiliary.get(b'phabricator:depends-on', [])
1811 depends = auxiliary.get(b'phabricator:depends-on', [])
1766 for phid in depends:
1812 for phid in depends:
1767 queue.append({b'phids': [phid]})
1813 queue.append({b'phids': [phid]})
1768 result.reverse()
1814 result.reverse()
1769 return smartset.baseset(result)
1815 return smartset.baseset(result)
1770
1816
1771 # Initialize prefetch cache
1817 # Initialize prefetch cache
1772 prefetched = {} # {id or phid: drev}
1818 prefetched = {} # {id or phid: drev}
1773
1819
1774 tree = _parse(spec)
1820 tree = _parse(spec)
1775 drevs, ancestordrevs = _prefetchdrevs(tree)
1821 drevs, ancestordrevs = _prefetchdrevs(tree)
1776
1822
1777 # developer config: phabricator.batchsize
1823 # developer config: phabricator.batchsize
1778 batchsize = ui.configint(b'phabricator', b'batchsize')
1824 batchsize = ui.configint(b'phabricator', b'batchsize')
1779
1825
1780 # Prefetch Differential Revisions in batch
1826 # Prefetch Differential Revisions in batch
1781 tofetch = set(drevs)
1827 tofetch = set(drevs)
1782 for r in ancestordrevs:
1828 for r in ancestordrevs:
1783 tofetch.update(range(max(1, r - batchsize), r + 1))
1829 tofetch.update(range(max(1, r - batchsize), r + 1))
1784 if drevs:
1830 if drevs:
1785 fetch({b'ids': list(tofetch)})
1831 fetch({b'ids': list(tofetch)})
1786 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1832 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1787
1833
1788 # Walk through the tree, return smartsets
1834 # Walk through the tree, return smartsets
1789 def walk(tree):
1835 def walk(tree):
1790 op = tree[0]
1836 op = tree[0]
1791 if op == b'symbol':
1837 if op == b'symbol':
1792 drev = _parsedrev(tree[1])
1838 drev = _parsedrev(tree[1])
1793 if drev:
1839 if drev:
1794 return smartset.baseset([drev])
1840 return smartset.baseset([drev])
1795 elif tree[1] in _knownstatusnames:
1841 elif tree[1] in _knownstatusnames:
1796 drevs = [
1842 drevs = [
1797 r
1843 r
1798 for r in validids
1844 for r in validids
1799 if _getstatusname(prefetched[r]) == tree[1]
1845 if _getstatusname(prefetched[r]) == tree[1]
1800 ]
1846 ]
1801 return smartset.baseset(drevs)
1847 return smartset.baseset(drevs)
1802 else:
1848 else:
1803 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1849 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1804 elif op in {b'and_', b'add', b'sub'}:
1850 elif op in {b'and_', b'add', b'sub'}:
1805 assert len(tree) == 3
1851 assert len(tree) == 3
1806 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1852 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1807 elif op == b'group':
1853 elif op == b'group':
1808 return walk(tree[1])
1854 return walk(tree[1])
1809 elif op == b'ancestors':
1855 elif op == b'ancestors':
1810 return getstack(walk(tree[1]))
1856 return getstack(walk(tree[1]))
1811 else:
1857 else:
1812 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1858 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1813
1859
1814 return [prefetched[r] for r in walk(tree)]
1860 return [prefetched[r] for r in walk(tree)]
1815
1861
1816
1862
1817 def getdescfromdrev(drev):
1863 def getdescfromdrev(drev):
1818 """get description (commit message) from "Differential Revision"
1864 """get description (commit message) from "Differential Revision"
1819
1865
1820 This is similar to differential.getcommitmessage API. But we only care
1866 This is similar to differential.getcommitmessage API. But we only care
1821 about limited fields: title, summary, test plan, and URL.
1867 about limited fields: title, summary, test plan, and URL.
1822 """
1868 """
1823 title = drev[b'title']
1869 title = drev[b'title']
1824 summary = drev[b'summary'].rstrip()
1870 summary = drev[b'summary'].rstrip()
1825 testplan = drev[b'testPlan'].rstrip()
1871 testplan = drev[b'testPlan'].rstrip()
1826 if testplan:
1872 if testplan:
1827 testplan = b'Test Plan:\n%s' % testplan
1873 testplan = b'Test Plan:\n%s' % testplan
1828 uri = b'Differential Revision: %s' % drev[b'uri']
1874 uri = b'Differential Revision: %s' % drev[b'uri']
1829 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1875 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1830
1876
1831
1877
1832 def get_amended_desc(drev, ctx, folded):
1878 def get_amended_desc(drev, ctx, folded):
1833 """similar to ``getdescfromdrev``, but supports a folded series of commits
1879 """similar to ``getdescfromdrev``, but supports a folded series of commits
1834
1880
1835 This is used when determining if an individual commit needs to have its
1881 This is used when determining if an individual commit needs to have its
1836 message amended after posting it for review. The determination is made for
1882 message amended after posting it for review. The determination is made for
1837 each individual commit, even when they were folded into one review.
1883 each individual commit, even when they were folded into one review.
1838 """
1884 """
1839 if not folded:
1885 if not folded:
1840 return getdescfromdrev(drev)
1886 return getdescfromdrev(drev)
1841
1887
1842 uri = b'Differential Revision: %s' % drev[b'uri']
1888 uri = b'Differential Revision: %s' % drev[b'uri']
1843
1889
1844 # Since the commit messages were combined when posting multiple commits
1890 # Since the commit messages were combined when posting multiple commits
1845 # with --fold, the fields can't be read from Phabricator here, or *all*
1891 # with --fold, the fields can't be read from Phabricator here, or *all*
1846 # affected local revisions will end up with the same commit message after
1892 # affected local revisions will end up with the same commit message after
1847 # the URI is amended in. Append in the DREV line, or update it if it
1893 # the URI is amended in. Append in the DREV line, or update it if it
1848 # exists. At worst, this means commit message or test plan updates on
1894 # exists. At worst, this means commit message or test plan updates on
1849 # Phabricator aren't propagated back to the repository, but that seems
1895 # Phabricator aren't propagated back to the repository, but that seems
1850 # reasonable for the case where local commits are effectively combined
1896 # reasonable for the case where local commits are effectively combined
1851 # in Phabricator.
1897 # in Phabricator.
1852 m = _differentialrevisiondescre.search(ctx.description())
1898 m = _differentialrevisiondescre.search(ctx.description())
1853 if not m:
1899 if not m:
1854 return b'\n\n'.join([ctx.description(), uri])
1900 return b'\n\n'.join([ctx.description(), uri])
1855
1901
1856 return _differentialrevisiondescre.sub(uri, ctx.description())
1902 return _differentialrevisiondescre.sub(uri, ctx.description())
1857
1903
1858
1904
1859 def getlocalcommits(diff):
1905 def getlocalcommits(diff):
1860 """get the set of local commits from a diff object
1906 """get the set of local commits from a diff object
1861
1907
1862 See ``getdiffmeta()`` for an example diff object.
1908 See ``getdiffmeta()`` for an example diff object.
1863 """
1909 """
1864 props = diff.get(b'properties') or {}
1910 props = diff.get(b'properties') or {}
1865 commits = props.get(b'local:commits') or {}
1911 commits = props.get(b'local:commits') or {}
1866 if len(commits) > 1:
1912 if len(commits) > 1:
1867 return {bin(c) for c in commits.keys()}
1913 return {bin(c) for c in commits.keys()}
1868
1914
1869 # Storing the diff metadata predates storing `local:commits`, so continue
1915 # Storing the diff metadata predates storing `local:commits`, so continue
1870 # to use that in the --no-fold case.
1916 # to use that in the --no-fold case.
1871 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1917 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1872
1918
1873
1919
1874 def getdiffmeta(diff):
1920 def getdiffmeta(diff):
1875 """get commit metadata (date, node, user, p1) from a diff object
1921 """get commit metadata (date, node, user, p1) from a diff object
1876
1922
1877 The metadata could be "hg:meta", sent by phabsend, like:
1923 The metadata could be "hg:meta", sent by phabsend, like:
1878
1924
1879 "properties": {
1925 "properties": {
1880 "hg:meta": {
1926 "hg:meta": {
1881 "branch": "default",
1927 "branch": "default",
1882 "date": "1499571514 25200",
1928 "date": "1499571514 25200",
1883 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1929 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1884 "user": "Foo Bar <foo@example.com>",
1930 "user": "Foo Bar <foo@example.com>",
1885 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1931 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1886 }
1932 }
1887 }
1933 }
1888
1934
1889 Or converted from "local:commits", sent by "arc", like:
1935 Or converted from "local:commits", sent by "arc", like:
1890
1936
1891 "properties": {
1937 "properties": {
1892 "local:commits": {
1938 "local:commits": {
1893 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1939 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1894 "author": "Foo Bar",
1940 "author": "Foo Bar",
1895 "authorEmail": "foo@example.com"
1941 "authorEmail": "foo@example.com"
1896 "branch": "default",
1942 "branch": "default",
1897 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1943 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1898 "local": "1000",
1944 "local": "1000",
1899 "message": "...",
1945 "message": "...",
1900 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1946 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1901 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1947 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1902 "summary": "...",
1948 "summary": "...",
1903 "tag": "",
1949 "tag": "",
1904 "time": 1499546314,
1950 "time": 1499546314,
1905 }
1951 }
1906 }
1952 }
1907 }
1953 }
1908
1954
1909 Note: metadata extracted from "local:commits" will lose time zone
1955 Note: metadata extracted from "local:commits" will lose time zone
1910 information.
1956 information.
1911 """
1957 """
1912 props = diff.get(b'properties') or {}
1958 props = diff.get(b'properties') or {}
1913 meta = props.get(b'hg:meta')
1959 meta = props.get(b'hg:meta')
1914 if not meta:
1960 if not meta:
1915 if props.get(b'local:commits'):
1961 if props.get(b'local:commits'):
1916 commit = sorted(props[b'local:commits'].values())[0]
1962 commit = sorted(props[b'local:commits'].values())[0]
1917 meta = {}
1963 meta = {}
1918 if b'author' in commit and b'authorEmail' in commit:
1964 if b'author' in commit and b'authorEmail' in commit:
1919 meta[b'user'] = b'%s <%s>' % (
1965 meta[b'user'] = b'%s <%s>' % (
1920 commit[b'author'],
1966 commit[b'author'],
1921 commit[b'authorEmail'],
1967 commit[b'authorEmail'],
1922 )
1968 )
1923 if b'time' in commit:
1969 if b'time' in commit:
1924 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1970 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1925 if b'branch' in commit:
1971 if b'branch' in commit:
1926 meta[b'branch'] = commit[b'branch']
1972 meta[b'branch'] = commit[b'branch']
1927 node = commit.get(b'commit', commit.get(b'rev'))
1973 node = commit.get(b'commit', commit.get(b'rev'))
1928 if node:
1974 if node:
1929 meta[b'node'] = node
1975 meta[b'node'] = node
1930 if len(commit.get(b'parents', ())) >= 1:
1976 if len(commit.get(b'parents', ())) >= 1:
1931 meta[b'parent'] = commit[b'parents'][0]
1977 meta[b'parent'] = commit[b'parents'][0]
1932 else:
1978 else:
1933 meta = {}
1979 meta = {}
1934 if b'date' not in meta and b'dateCreated' in diff:
1980 if b'date' not in meta and b'dateCreated' in diff:
1935 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1981 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1936 if b'branch' not in meta and diff.get(b'branch'):
1982 if b'branch' not in meta and diff.get(b'branch'):
1937 meta[b'branch'] = diff[b'branch']
1983 meta[b'branch'] = diff[b'branch']
1938 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1984 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1939 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1985 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1940 return meta
1986 return meta
1941
1987
1942
1988
1943 def _getdrevs(ui, stack, specs):
1989 def _getdrevs(ui, stack, specs):
1944 """convert user supplied DREVSPECs into "Differential Revision" dicts
1990 """convert user supplied DREVSPECs into "Differential Revision" dicts
1945
1991
1946 See ``hg help phabread`` for how to specify each DREVSPEC.
1992 See ``hg help phabread`` for how to specify each DREVSPEC.
1947 """
1993 """
1948 if len(specs) > 0:
1994 if len(specs) > 0:
1949
1995
1950 def _formatspec(s):
1996 def _formatspec(s):
1951 if stack:
1997 if stack:
1952 s = b':(%s)' % s
1998 s = b':(%s)' % s
1953 return b'(%s)' % s
1999 return b'(%s)' % s
1954
2000
1955 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
2001 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
1956
2002
1957 drevs = querydrev(ui, spec)
2003 drevs = querydrev(ui, spec)
1958 if drevs:
2004 if drevs:
1959 return drevs
2005 return drevs
1960
2006
1961 raise error.Abort(_(b"empty DREVSPEC set"))
2007 raise error.Abort(_(b"empty DREVSPEC set"))
1962
2008
1963
2009
1964 def readpatch(ui, drevs, write):
2010 def readpatch(ui, drevs, write):
1965 """generate plain-text patch readable by 'hg import'
2011 """generate plain-text patch readable by 'hg import'
1966
2012
1967 write takes a list of (DREV, bytes), where DREV is the differential number
2013 write takes a list of (DREV, bytes), where DREV is the differential number
1968 (as bytes, without the "D" prefix) and the bytes are the text of a patch
2014 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1969 to be imported. drevs is what "querydrev" returns, results of
2015 to be imported. drevs is what "querydrev" returns, results of
1970 "differential.query".
2016 "differential.query".
1971 """
2017 """
1972 # Prefetch hg:meta property for all diffs
2018 # Prefetch hg:meta property for all diffs
1973 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
2019 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1974 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
2020 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1975
2021
1976 patches = []
2022 patches = []
1977
2023
1978 # Generate patch for each drev
2024 # Generate patch for each drev
1979 for drev in drevs:
2025 for drev in drevs:
1980 ui.note(_(b'reading D%s\n') % drev[b'id'])
2026 ui.note(_(b'reading D%s\n') % drev[b'id'])
1981
2027
1982 diffid = max(int(v) for v in drev[b'diffs'])
2028 diffid = max(int(v) for v in drev[b'diffs'])
1983 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
2029 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1984 desc = getdescfromdrev(drev)
2030 desc = getdescfromdrev(drev)
1985 header = b'# HG changeset patch\n'
2031 header = b'# HG changeset patch\n'
1986
2032
1987 # Try to preserve metadata from hg:meta property. Write hg patch
2033 # Try to preserve metadata from hg:meta property. Write hg patch
1988 # headers that can be read by the "import" command. See patchheadermap
2034 # headers that can be read by the "import" command. See patchheadermap
1989 # and extract in mercurial/patch.py for supported headers.
2035 # and extract in mercurial/patch.py for supported headers.
1990 meta = getdiffmeta(diffs[b'%d' % diffid])
2036 meta = getdiffmeta(diffs[b'%d' % diffid])
1991 for k in _metanamemap.keys():
2037 for k in _metanamemap.keys():
1992 if k in meta:
2038 if k in meta:
1993 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
2039 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1994
2040
1995 content = b'%s%s\n%s' % (header, desc, body)
2041 content = b'%s%s\n%s' % (header, desc, body)
1996 patches.append((drev[b'id'], content))
2042 patches.append((drev[b'id'], content))
1997
2043
1998 # Write patches to the supplied callback
2044 # Write patches to the supplied callback
1999 write(patches)
2045 write(patches)
2000
2046
2001
2047
2002 @vcrcommand(
2048 @vcrcommand(
2003 b'phabread',
2049 b'phabread',
2004 [(b'', b'stack', False, _(b'read dependencies'))],
2050 [(b'', b'stack', False, _(b'read dependencies'))],
2005 _(b'DREVSPEC... [OPTIONS]'),
2051 _(b'DREVSPEC... [OPTIONS]'),
2006 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2052 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2007 optionalrepo=True,
2053 optionalrepo=True,
2008 )
2054 )
2009 def phabread(ui, repo, *specs, **opts):
2055 def phabread(ui, repo, *specs, **opts):
2010 """print patches from Phabricator suitable for importing
2056 """print patches from Phabricator suitable for importing
2011
2057
2012 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
2058 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
2013 the number ``123``. It could also have common operators like ``+``, ``-``,
2059 the number ``123``. It could also have common operators like ``+``, ``-``,
2014 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2060 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2015 select a stack. If multiple DREVSPEC values are given, the result is the
2061 select a stack. If multiple DREVSPEC values are given, the result is the
2016 union of each individually evaluated value. No attempt is currently made
2062 union of each individually evaluated value. No attempt is currently made
2017 to reorder the values to run from parent to child.
2063 to reorder the values to run from parent to child.
2018
2064
2019 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2065 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2020 could be used to filter patches by status. For performance reason, they
2066 could be used to filter patches by status. For performance reason, they
2021 only represent a subset of non-status selections and cannot be used alone.
2067 only represent a subset of non-status selections and cannot be used alone.
2022
2068
2023 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
2069 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
2024 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2070 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2025 stack up to D9.
2071 stack up to D9.
2026
2072
2027 If --stack is given, follow dependencies information and read all patches.
2073 If --stack is given, follow dependencies information and read all patches.
2028 It is equivalent to the ``:`` operator.
2074 It is equivalent to the ``:`` operator.
2029 """
2075 """
2030 opts = pycompat.byteskwargs(opts)
2076 opts = pycompat.byteskwargs(opts)
2031 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2077 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2032
2078
2033 def _write(patches):
2079 def _write(patches):
2034 for drev, content in patches:
2080 for drev, content in patches:
2035 ui.write(content)
2081 ui.write(content)
2036
2082
2037 readpatch(ui, drevs, _write)
2083 readpatch(ui, drevs, _write)
2038
2084
2039
2085
2040 @vcrcommand(
2086 @vcrcommand(
2041 b'phabimport',
2087 b'phabimport',
2042 [(b'', b'stack', False, _(b'import dependencies as well'))],
2088 [(b'', b'stack', False, _(b'import dependencies as well'))],
2043 _(b'DREVSPEC... [OPTIONS]'),
2089 _(b'DREVSPEC... [OPTIONS]'),
2044 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2090 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2045 )
2091 )
2046 def phabimport(ui, repo, *specs, **opts):
2092 def phabimport(ui, repo, *specs, **opts):
2047 """import patches from Phabricator for the specified Differential Revisions
2093 """import patches from Phabricator for the specified Differential Revisions
2048
2094
2049 The patches are read and applied starting at the parent of the working
2095 The patches are read and applied starting at the parent of the working
2050 directory.
2096 directory.
2051
2097
2052 See ``hg help phabread`` for how to specify DREVSPEC.
2098 See ``hg help phabread`` for how to specify DREVSPEC.
2053 """
2099 """
2054 opts = pycompat.byteskwargs(opts)
2100 opts = pycompat.byteskwargs(opts)
2055
2101
2056 # --bypass avoids losing exec and symlink bits when importing on Windows,
2102 # --bypass avoids losing exec and symlink bits when importing on Windows,
2057 # and allows importing with a dirty wdir. It also aborts instead of leaving
2103 # and allows importing with a dirty wdir. It also aborts instead of leaving
2058 # rejects.
2104 # rejects.
2059 opts[b'bypass'] = True
2105 opts[b'bypass'] = True
2060
2106
2061 # Mandatory default values, synced with commands.import
2107 # Mandatory default values, synced with commands.import
2062 opts[b'strip'] = 1
2108 opts[b'strip'] = 1
2063 opts[b'prefix'] = b''
2109 opts[b'prefix'] = b''
2064 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2110 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2065 opts[b'obsolete'] = False
2111 opts[b'obsolete'] = False
2066
2112
2067 if ui.configbool(b'phabimport', b'secret'):
2113 if ui.configbool(b'phabimport', b'secret'):
2068 opts[b'secret'] = True
2114 opts[b'secret'] = True
2069 if ui.configbool(b'phabimport', b'obsolete'):
2115 if ui.configbool(b'phabimport', b'obsolete'):
2070 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2116 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2071
2117
2072 def _write(patches):
2118 def _write(patches):
2073 parents = repo[None].parents()
2119 parents = repo[None].parents()
2074
2120
2075 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2121 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2076 for drev, contents in patches:
2122 for drev, contents in patches:
2077 ui.status(_(b'applying patch from D%s\n') % drev)
2123 ui.status(_(b'applying patch from D%s\n') % drev)
2078
2124
2079 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2125 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2080 msg, node, rej = cmdutil.tryimportone(
2126 msg, node, rej = cmdutil.tryimportone(
2081 ui,
2127 ui,
2082 repo,
2128 repo,
2083 patchdata,
2129 patchdata,
2084 parents,
2130 parents,
2085 opts,
2131 opts,
2086 [],
2132 [],
2087 None, # Never update wdir to another revision
2133 None, # Never update wdir to another revision
2088 )
2134 )
2089
2135
2090 if not node:
2136 if not node:
2091 raise error.Abort(_(b'D%s: no diffs found') % drev)
2137 raise error.Abort(_(b'D%s: no diffs found') % drev)
2092
2138
2093 ui.note(msg + b'\n')
2139 ui.note(msg + b'\n')
2094 parents = [repo[node]]
2140 parents = [repo[node]]
2095
2141
2096 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2142 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2097
2143
2098 readpatch(repo.ui, drevs, _write)
2144 readpatch(repo.ui, drevs, _write)
2099
2145
2100
2146
2101 @vcrcommand(
2147 @vcrcommand(
2102 b'phabupdate',
2148 b'phabupdate',
2103 [
2149 [
2104 (b'', b'accept', False, _(b'accept revisions')),
2150 (b'', b'accept', False, _(b'accept revisions')),
2105 (b'', b'reject', False, _(b'reject revisions')),
2151 (b'', b'reject', False, _(b'reject revisions')),
2106 (b'', b'abandon', False, _(b'abandon revisions')),
2152 (b'', b'abandon', False, _(b'abandon revisions')),
2107 (b'', b'reclaim', False, _(b'reclaim revisions')),
2153 (b'', b'reclaim', False, _(b'reclaim revisions')),
2108 (b'm', b'comment', b'', _(b'comment on the last revision')),
2154 (b'm', b'comment', b'', _(b'comment on the last revision')),
2109 ],
2155 ],
2110 _(b'DREVSPEC... [OPTIONS]'),
2156 _(b'DREVSPEC... [OPTIONS]'),
2111 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2157 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2112 optionalrepo=True,
2158 optionalrepo=True,
2113 )
2159 )
2114 def phabupdate(ui, repo, *specs, **opts):
2160 def phabupdate(ui, repo, *specs, **opts):
2115 """update Differential Revision in batch
2161 """update Differential Revision in batch
2116
2162
2117 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2163 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2118 """
2164 """
2119 opts = pycompat.byteskwargs(opts)
2165 opts = pycompat.byteskwargs(opts)
2120 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
2166 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
2121 if len(flags) > 1:
2167 if len(flags) > 1:
2122 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2168 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2123
2169
2124 actions = []
2170 actions = []
2125 for f in flags:
2171 for f in flags:
2126 actions.append({b'type': f, b'value': True})
2172 actions.append({b'type': f, b'value': True})
2127
2173
2128 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2174 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2129 for i, drev in enumerate(drevs):
2175 for i, drev in enumerate(drevs):
2130 if i + 1 == len(drevs) and opts.get(b'comment'):
2176 if i + 1 == len(drevs) and opts.get(b'comment'):
2131 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2177 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2132 if actions:
2178 if actions:
2133 params = {
2179 params = {
2134 b'objectIdentifier': drev[b'phid'],
2180 b'objectIdentifier': drev[b'phid'],
2135 b'transactions': actions,
2181 b'transactions': actions,
2136 }
2182 }
2137 callconduit(ui, b'differential.revision.edit', params)
2183 callconduit(ui, b'differential.revision.edit', params)
2138
2184
2139
2185
2140 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2186 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2141 def template_review(context, mapping):
2187 def template_review(context, mapping):
2142 """:phabreview: Object describing the review for this changeset.
2188 """:phabreview: Object describing the review for this changeset.
2143 Has attributes `url` and `id`.
2189 Has attributes `url` and `id`.
2144 """
2190 """
2145 ctx = context.resource(mapping, b'ctx')
2191 ctx = context.resource(mapping, b'ctx')
2146 m = _differentialrevisiondescre.search(ctx.description())
2192 m = _differentialrevisiondescre.search(ctx.description())
2147 if m:
2193 if m:
2148 return templateutil.hybriddict(
2194 return templateutil.hybriddict(
2149 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2195 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2150 )
2196 )
2151 else:
2197 else:
2152 tags = ctx.repo().nodetags(ctx.node())
2198 tags = ctx.repo().nodetags(ctx.node())
2153 for t in tags:
2199 for t in tags:
2154 if _differentialrevisiontagre.match(t):
2200 if _differentialrevisiontagre.match(t):
2155 url = ctx.repo().ui.config(b'phabricator', b'url')
2201 url = ctx.repo().ui.config(b'phabricator', b'url')
2156 if not url.endswith(b'/'):
2202 if not url.endswith(b'/'):
2157 url += b'/'
2203 url += b'/'
2158 url += t
2204 url += t
2159
2205
2160 return templateutil.hybriddict({b'url': url, b'id': t,})
2206 return templateutil.hybriddict({b'url': url, b'id': t,})
2161 return None
2207 return None
2162
2208
2163
2209
2164 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2210 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2165 def template_status(context, mapping):
2211 def template_status(context, mapping):
2166 """:phabstatus: String. Status of Phabricator differential.
2212 """:phabstatus: String. Status of Phabricator differential.
2167 """
2213 """
2168 ctx = context.resource(mapping, b'ctx')
2214 ctx = context.resource(mapping, b'ctx')
2169 repo = context.resource(mapping, b'repo')
2215 repo = context.resource(mapping, b'repo')
2170 ui = context.resource(mapping, b'ui')
2216 ui = context.resource(mapping, b'ui')
2171
2217
2172 rev = ctx.rev()
2218 rev = ctx.rev()
2173 try:
2219 try:
2174 drevid = getdrevmap(repo, [rev])[rev]
2220 drevid = getdrevmap(repo, [rev])[rev]
2175 except KeyError:
2221 except KeyError:
2176 return None
2222 return None
2177 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2223 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2178 for drev in drevs:
2224 for drev in drevs:
2179 if int(drev[b'id']) == drevid:
2225 if int(drev[b'id']) == drevid:
2180 return templateutil.hybriddict(
2226 return templateutil.hybriddict(
2181 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2227 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2182 )
2228 )
2183 return None
2229 return None
2184
2230
2185
2231
2186 @show.showview(b'phabstatus', csettopic=b'work')
2232 @show.showview(b'phabstatus', csettopic=b'work')
2187 def phabstatusshowview(ui, repo, displayer):
2233 def phabstatusshowview(ui, repo, displayer):
2188 """Phabricator differiential status"""
2234 """Phabricator differiential status"""
2189 revs = repo.revs('sort(_underway(), topo)')
2235 revs = repo.revs('sort(_underway(), topo)')
2190 drevmap = getdrevmap(repo, revs)
2236 drevmap = getdrevmap(repo, revs)
2191 unknownrevs, drevids, revsbydrevid = [], set(), {}
2237 unknownrevs, drevids, revsbydrevid = [], set(), {}
2192 for rev, drevid in pycompat.iteritems(drevmap):
2238 for rev, drevid in pycompat.iteritems(drevmap):
2193 if drevid is not None:
2239 if drevid is not None:
2194 drevids.add(drevid)
2240 drevids.add(drevid)
2195 revsbydrevid.setdefault(drevid, set()).add(rev)
2241 revsbydrevid.setdefault(drevid, set()).add(rev)
2196 else:
2242 else:
2197 unknownrevs.append(rev)
2243 unknownrevs.append(rev)
2198
2244
2199 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2245 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2200 drevsbyrev = {}
2246 drevsbyrev = {}
2201 for drev in drevs:
2247 for drev in drevs:
2202 for rev in revsbydrevid[int(drev[b'id'])]:
2248 for rev in revsbydrevid[int(drev[b'id'])]:
2203 drevsbyrev[rev] = drev
2249 drevsbyrev[rev] = drev
2204
2250
2205 def phabstatus(ctx):
2251 def phabstatus(ctx):
2206 drev = drevsbyrev[ctx.rev()]
2252 drev = drevsbyrev[ctx.rev()]
2207 status = ui.label(
2253 status = ui.label(
2208 b'%(statusName)s' % drev,
2254 b'%(statusName)s' % drev,
2209 b'phabricator.status.%s' % _getstatusname(drev),
2255 b'phabricator.status.%s' % _getstatusname(drev),
2210 )
2256 )
2211 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2257 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2212
2258
2213 revs -= smartset.baseset(unknownrevs)
2259 revs -= smartset.baseset(unknownrevs)
2214 revdag = graphmod.dagwalker(repo, revs)
2260 revdag = graphmod.dagwalker(repo, revs)
2215
2261
2216 ui.setconfig(b'experimental', b'graphshorten', True)
2262 ui.setconfig(b'experimental', b'graphshorten', True)
2217 displayer._exthook = phabstatus
2263 displayer._exthook = phabstatus
2218 nodelen = show.longestshortest(repo, revs)
2264 nodelen = show.longestshortest(repo, revs)
2219 logcmdutil.displaygraph(
2265 logcmdutil.displaygraph(
2220 ui,
2266 ui,
2221 repo,
2267 repo,
2222 revdag,
2268 revdag,
2223 displayer,
2269 displayer,
2224 graphmod.asciiedges,
2270 graphmod.asciiedges,
2225 props={b'nodelen': nodelen},
2271 props={b'nodelen': nodelen},
2226 )
2272 )
@@ -1,890 +1,961 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 >
5 >
6 > [auth]
6 > [auth]
7 > hgphab.schemes = https
7 > hgphab.schemes = https
8 > hgphab.prefix = phab.mercurial-scm.org
8 > hgphab.prefix = phab.mercurial-scm.org
9 > # When working on the extension and making phabricator interaction
9 > # When working on the extension and making phabricator interaction
10 > # changes, edit this to be a real phabricator token. When done, edit
10 > # changes, edit this to be a real phabricator token. When done, edit
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
12 > # token with this value.
12 > # token with this value.
13 > hgphab.phabtoken = cli-hahayouwish
13 > hgphab.phabtoken = cli-hahayouwish
14 >
14 >
15 > [phabricator]
15 > [phabricator]
16 > debug = True
16 > debug = True
17 > EOF
17 > EOF
18 $ hg init repo
18 $ hg init repo
19 $ cd repo
19 $ cd repo
20 $ cat >> .hg/hgrc <<EOF
20 $ cat >> .hg/hgrc <<EOF
21 > [phabricator]
21 > [phabricator]
22 > url = https://phab.mercurial-scm.org/
22 > url = https://phab.mercurial-scm.org/
23 > callsign = HG
23 > callsign = HG
24 > EOF
24 > EOF
25 $ VCR="$TESTDIR/phabricator"
25 $ VCR="$TESTDIR/phabricator"
26
26
27 Error is handled reasonably. We override the phabtoken here so that
27 Error is handled reasonably. We override the phabtoken here so that
28 when you're developing changes to phabricator.py you can edit the
28 when you're developing changes to phabricator.py you can edit the
29 above config and have a real token in the test but not have to edit
29 above config and have a real token in the test but not have to edit
30 this test.
30 this test.
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
34
34
35 Missing arguments don't crash, and may print the command help
35 Missing arguments don't crash, and may print the command help
36
36
37 $ hg debugcallconduit
37 $ hg debugcallconduit
38 hg debugcallconduit: invalid arguments
38 hg debugcallconduit: invalid arguments
39 hg debugcallconduit METHOD
39 hg debugcallconduit METHOD
40
40
41 call Conduit API
41 call Conduit API
42
42
43 options:
43 options:
44
44
45 (use 'hg debugcallconduit -h' to show more help)
45 (use 'hg debugcallconduit -h' to show more help)
46 [255]
46 [255]
47 $ hg phabread
47 $ hg phabread
48 abort: empty DREVSPEC set
48 abort: empty DREVSPEC set
49 [255]
49 [255]
50
50
51 Basic phabread:
51 Basic phabread:
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
53 # HG changeset patch
53 # HG changeset patch
54 # Date 1536771503 0
54 # Date 1536771503 0
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
56 exchangev2: start to implement pull with wire protocol v2
56 exchangev2: start to implement pull with wire protocol v2
57
57
58 Wire protocol version 2 will take a substantially different
58 Wire protocol version 2 will take a substantially different
59 approach to exchange than version 1 (at least as far as pulling
59 approach to exchange than version 1 (at least as far as pulling
60 is concerned).
60 is concerned).
61
61
62 This commit establishes a new exchangev2 module for holding
62 This commit establishes a new exchangev2 module for holding
63
63
64 Phabread with multiple DREVSPEC
64 Phabread with multiple DREVSPEC
65
65
66 TODO: attempt to order related revisions like --stack?
66 TODO: attempt to order related revisions like --stack?
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
68 > | grep '^Differential Revision'
68 > | grep '^Differential Revision'
69 Differential Revision: https://phab.mercurial-scm.org/D8205
69 Differential Revision: https://phab.mercurial-scm.org/D8205
70 Differential Revision: https://phab.mercurial-scm.org/D8206
70 Differential Revision: https://phab.mercurial-scm.org/D8206
71 Differential Revision: https://phab.mercurial-scm.org/D8207
71 Differential Revision: https://phab.mercurial-scm.org/D8207
72
72
73 Empty DREVSPECs don't crash
73 Empty DREVSPECs don't crash
74
74
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
76 abort: empty DREVSPEC set
76 abort: empty DREVSPEC set
77 [255]
77 [255]
78
78
79
79
80 phabupdate with an accept:
80 phabupdate with an accept:
81 $ hg phabupdate --accept D4564 \
81 $ hg phabupdate --accept D4564 \
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
83 > --test-vcr "$VCR/accept-4564.json"
83 > --test-vcr "$VCR/accept-4564.json"
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
86 [255]
86 [255]
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
88
88
89 Create a differential diff:
89 Create a differential diff:
90 $ HGENCODING=utf-8; export HGENCODING
90 $ HGENCODING=utf-8; export HGENCODING
91 $ echo alpha > alpha
91 $ echo alpha > alpha
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
93 adding alpha
93 adding alpha
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
96 new commits: ['347bf67801e5']
96 new commits: ['347bf67801e5']
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
98 $ echo more >> alpha
98 $ echo more >> alpha
99 $ HGEDITOR=true hg ci --amend
99 $ HGEDITOR=true hg ci --amend
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
101 $ echo beta > beta
101 $ echo beta > beta
102 $ hg ci --addremove -m 'create beta for phabricator test'
102 $ hg ci --addremove -m 'create beta for phabricator test'
103 adding beta
103 adding beta
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
105 c44b38f24a45 mapped to old nodes []
105 c44b38f24a45 mapped to old nodes []
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
108 new commits: ['a692622e6937']
108 new commits: ['a692622e6937']
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
110 $ unset HGENCODING
110 $ unset HGENCODING
111
111
112 The amend won't explode after posting a public commit. The local tag is left
112 The amend won't explode after posting a public commit. The local tag is left
113 behind to identify it.
113 behind to identify it.
114
114
115 $ echo 'public change' > beta
115 $ echo 'public change' > beta
116 $ hg ci -m 'create public change for phabricator testing'
116 $ hg ci -m 'create public change for phabricator testing'
117 $ hg phase --public .
117 $ hg phase --public .
118 $ echo 'draft change' > alpha
118 $ echo 'draft change' > alpha
119 $ hg ci -m 'create draft change for phabricator testing'
119 $ hg ci -m 'create draft change for phabricator testing'
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
123 warning: not updating public commit 2:7b4185ab5d16
123 warning: not updating public commit 2:7b4185ab5d16
124 new commits: ['3244dc4a3334']
124 new commits: ['3244dc4a3334']
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
126 $ hg tags -v
126 $ hg tags -v
127 tip 3:3244dc4a3334
127 tip 3:3244dc4a3334
128 D7917 2:7b4185ab5d16 local
128 D7917 2:7b4185ab5d16 local
129
129
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
131 > {
131 > {
132 > "constraints": {
132 > "constraints": {
133 > "isBot": true
133 > "isBot": true
134 > }
134 > }
135 > }
135 > }
136 > EOF
136 > EOF
137 {
137 {
138 "cursor": {
138 "cursor": {
139 "after": null,
139 "after": null,
140 "before": null,
140 "before": null,
141 "limit": 100,
141 "limit": 100,
142 "order": null
142 "order": null
143 },
143 },
144 "data": [],
144 "data": [],
145 "maps": {},
145 "maps": {},
146 "query": {
146 "query": {
147 "queryKey": null
147 "queryKey": null
148 }
148 }
149 }
149 }
150
150
151 Template keywords
151 Template keywords
152 $ hg log -T'{rev} {phabreview|json}\n'
152 $ hg log -T'{rev} {phabreview|json}\n'
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
157
157
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
159 3 https://phab.mercurial-scm.org/D7918 D7918
159 3 https://phab.mercurial-scm.org/D7918 D7918
160 2 https://phab.mercurial-scm.org/D7917 D7917
160 2 https://phab.mercurial-scm.org/D7917 D7917
161 1 https://phab.mercurial-scm.org/D7916 D7916
161 1 https://phab.mercurial-scm.org/D7916 D7916
162 0 https://phab.mercurial-scm.org/D7915 D7915
162 0 https://phab.mercurial-scm.org/D7915 D7915
163
163
164 Commenting when phabsending:
164 Commenting when phabsending:
165 $ echo comment > comment
165 $ echo comment > comment
166 $ hg ci --addremove -m "create comment for phabricator test"
166 $ hg ci --addremove -m "create comment for phabricator test"
167 adding comment
167 adding comment
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
169 D7919 - created - d5dddca9023d: create comment for phabricator test
169 D7919 - created - d5dddca9023d: create comment for phabricator test
170 new commits: ['f7db812bbe1d']
170 new commits: ['f7db812bbe1d']
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
172 $ echo comment2 >> comment
172 $ echo comment2 >> comment
173 $ hg ci --amend
173 $ hg ci --amend
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
176 1849d7828727 mapped to old nodes []
176 1849d7828727 mapped to old nodes []
177 D7919 - updated - 1849d7828727: create comment for phabricator test
177 D7919 - updated - 1849d7828727: create comment for phabricator test
178
178
179 Phabsending a skipped commit:
179 Phabsending a skipped commit:
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
181 1849d7828727 mapped to old nodes ['1849d7828727']
181 1849d7828727 mapped to old nodes ['1849d7828727']
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
183
183
184 Phabsend doesn't create an instability when rebasing existing revisions on top
184 Phabsend doesn't create an instability when rebasing existing revisions on top
185 of new revisions.
185 of new revisions.
186
186
187 $ hg init reorder
187 $ hg init reorder
188 $ cd reorder
188 $ cd reorder
189 $ cat >> .hg/hgrc <<EOF
189 $ cat >> .hg/hgrc <<EOF
190 > [phabricator]
190 > [phabricator]
191 > url = https://phab.mercurial-scm.org/
191 > url = https://phab.mercurial-scm.org/
192 > callsign = HG
192 > callsign = HG
193 > [experimental]
193 > [experimental]
194 > evolution = all
194 > evolution = all
195 > EOF
195 > EOF
196
196
197 $ echo "add" > file1.txt
197 $ echo "add" > file1.txt
198 $ hg ci -Aqm 'added'
198 $ hg ci -Aqm 'added'
199 $ echo "mod1" > file1.txt
199 $ echo "mod1" > file1.txt
200 $ hg ci -m 'modified 1'
200 $ hg ci -m 'modified 1'
201 $ echo "mod2" > file1.txt
201 $ echo "mod2" > file1.txt
202 $ hg ci -m 'modified 2'
202 $ hg ci -m 'modified 2'
203 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
203 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
204 D8433 - created - 5d3959e20d1d: modified 2
204 D8433 - created - 5d3959e20d1d: modified 2
205 new commits: ['2b4aa8a88d61']
205 new commits: ['2b4aa8a88d61']
206 $ hg log -G -T compact
206 $ hg log -G -T compact
207 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
207 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
208 | modified 2
208 | modified 2
209 |
209 |
210 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
210 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
211 | modified 1
211 | modified 1
212 |
212 |
213 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
213 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
214 added
214 added
215
215
216 Also check that it doesn't create more orphans outside of the stack
217
218 $ hg up -q 1
219 $ echo "mod3" > file1.txt
220 $ hg ci -m 'modified 3'
221 created new head
222 $ hg up -q 3
216 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
223 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
217 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
224 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
218 D8434 - created - d549263bcb2d: modified 1
225 D8434 - created - d549263bcb2d: modified 1
219 D8433 - updated - 2b4aa8a88d61: modified 2
226 D8433 - updated - 2b4aa8a88d61: modified 2
220 new commits: ['876a60d024de']
227 new commits: ['876a60d024de']
221 new commits: ['0c6523cb1d0f']
228 new commits: ['0c6523cb1d0f']
229 restabilizing 1eda4bf55021 as d2c78c3a3e01
222 $ hg log -G -T compact
230 $ hg log -G -T compact
223 @ 5[tip] 1dff6b051abf 1970-01-01 00:00 +0000 test
231 o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
224 | modified 2
232 | modified 3
225 |
233 |
226 o 4:0 eb3752621d45 1970-01-01 00:00 +0000 test
234 | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
235 |/ modified 2
236 |
237 o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
227 | modified 1
238 | modified 1
228 |
239 |
229 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
240 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
230 added
241 added
231
242
232 Posting obsolete commits is disallowed
243 Posting obsolete commits is disallowed
233
244
234 $ echo "mod3" > file1.txt
245 $ echo "mod3" > file1.txt
235 $ hg ci -m 'modified A'
246 $ hg ci -m 'modified A'
236 $ echo "mod4" > file1.txt
247 $ echo "mod4" > file1.txt
237 $ hg ci -m 'modified B'
248 $ hg ci -m 'modified B'
238
249
239 $ hg up '.^'
250 $ hg up '.^'
240 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
251 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
241 $ echo 'obsolete' > file1.txt
252 $ echo 'obsolete' > file1.txt
242 $ hg amend --config extensions.amend=
253 $ hg amend --config extensions.amend=
243 1 new orphan changesets
254 1 new orphan changesets
244 $ hg log -G
255 $ hg log -G
245 @ changeset: 8:8d83edb3cbac
256 @ changeset: 10:082be6c94150
246 | tag: tip
257 | tag: tip
247 | parent: 5:1dff6b051abf
258 | parent: 6:0c6523cb1d0f
248 | user: test
259 | user: test
249 | date: Thu Jan 01 00:00:00 1970 +0000
260 | date: Thu Jan 01 00:00:00 1970 +0000
250 | summary: modified A
261 | summary: modified A
251 |
262 |
252 | * changeset: 7:d4ea1b2e3511
263 | * changeset: 9:a67643f48146
253 | | user: test
264 | | user: test
254 | | date: Thu Jan 01 00:00:00 1970 +0000
265 | | date: Thu Jan 01 00:00:00 1970 +0000
255 | | instability: orphan
266 | | instability: orphan
256 | | summary: modified B
267 | | summary: modified B
257 | |
268 | |
258 | x changeset: 6:4635d7f0d1ff
269 | x changeset: 8:db79727cb2f7
259 |/ user: test
270 |/ parent: 6:0c6523cb1d0f
271 | user: test
260 | date: Thu Jan 01 00:00:00 1970 +0000
272 | date: Thu Jan 01 00:00:00 1970 +0000
261 | obsolete: rewritten using amend as 8:8d83edb3cbac
273 | obsolete: rewritten using amend as 10:082be6c94150
262 | summary: modified A
274 | summary: modified A
263 |
275 |
264 o changeset: 5:1dff6b051abf
276 | o changeset: 7:d2c78c3a3e01
265 | user: test
277 | | parent: 5:876a60d024de
278 | | user: test
279 | | date: Thu Jan 01 00:00:00 1970 +0000
280 | | summary: modified 3
281 | |
282 o | changeset: 6:0c6523cb1d0f
283 |/ user: test
266 | date: Thu Jan 01 00:00:00 1970 +0000
284 | date: Thu Jan 01 00:00:00 1970 +0000
267 | summary: modified 2
285 | summary: modified 2
268 |
286 |
269 o changeset: 4:eb3752621d45
287 o changeset: 5:876a60d024de
270 | parent: 0:5cbade24e0fa
288 | parent: 0:5cbade24e0fa
271 | user: test
289 | user: test
272 | date: Thu Jan 01 00:00:00 1970 +0000
290 | date: Thu Jan 01 00:00:00 1970 +0000
273 | summary: modified 1
291 | summary: modified 1
274 |
292 |
275 o changeset: 0:5cbade24e0fa
293 o changeset: 0:5cbade24e0fa
276 user: test
294 user: test
277 date: Thu Jan 01 00:00:00 1970 +0000
295 date: Thu Jan 01 00:00:00 1970 +0000
278 summary: added
296 summary: added
279
297
280 $ hg phabsend -r 5::
298 $ hg phabsend -r 5::
281 abort: obsolete commits cannot be posted for review
299 abort: obsolete commits cannot be posted for review
282 [255]
300 [255]
283
301
302 Don't restack existing orphans
303
304 $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
305 876a60d024de mapped to old nodes ['876a60d024de']
306 0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
307 D8434 - updated - 876a60d024de: modified 1
308 D8433 - updated - 0c6523cb1d0f: modified 2
309 D8435 - created - 082be6c94150: modified A
310 new commits: ['b5913193c805']
311 not restabilizing unchanged d2c78c3a3e01
312 $ hg log -G
313 @ changeset: 11:b5913193c805
314 | tag: tip
315 | parent: 6:0c6523cb1d0f
316 | user: test
317 | date: Thu Jan 01 00:00:00 1970 +0000
318 | summary: modified A
319 |
320 | * changeset: 9:a67643f48146
321 | | user: test
322 | | date: Thu Jan 01 00:00:00 1970 +0000
323 | | instability: orphan
324 | | summary: modified B
325 | |
326 | x changeset: 8:db79727cb2f7
327 |/ parent: 6:0c6523cb1d0f
328 | user: test
329 | date: Thu Jan 01 00:00:00 1970 +0000
330 | obsolete: rewritten using amend, phabsend as 11:b5913193c805
331 | summary: modified A
332 |
333 | o changeset: 7:d2c78c3a3e01
334 | | parent: 5:876a60d024de
335 | | user: test
336 | | date: Thu Jan 01 00:00:00 1970 +0000
337 | | summary: modified 3
338 | |
339 o | changeset: 6:0c6523cb1d0f
340 |/ user: test
341 | date: Thu Jan 01 00:00:00 1970 +0000
342 | summary: modified 2
343 |
344 o changeset: 5:876a60d024de
345 | parent: 0:5cbade24e0fa
346 | user: test
347 | date: Thu Jan 01 00:00:00 1970 +0000
348 | summary: modified 1
349 |
350 o changeset: 0:5cbade24e0fa
351 user: test
352 date: Thu Jan 01 00:00:00 1970 +0000
353 summary: added
354
284 $ cd ..
355 $ cd ..
285
356
286 Phabesending a new binary, a modified binary, and a removed binary
357 Phabesending a new binary, a modified binary, and a removed binary
287
358
288 >>> open('bin', 'wb').write(b'\0a') and None
359 >>> open('bin', 'wb').write(b'\0a') and None
289 $ hg ci -Am 'add binary'
360 $ hg ci -Am 'add binary'
290 adding bin
361 adding bin
291 >>> open('bin', 'wb').write(b'\0b') and None
362 >>> open('bin', 'wb').write(b'\0b') and None
292 $ hg ci -m 'modify binary'
363 $ hg ci -m 'modify binary'
293 $ hg rm bin
364 $ hg rm bin
294 $ hg ci -m 'remove binary'
365 $ hg ci -m 'remove binary'
295 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
366 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
296 uploading bin@aa24a81f55de
367 uploading bin@aa24a81f55de
297 D8007 - created - aa24a81f55de: add binary
368 D8007 - created - aa24a81f55de: add binary
298 uploading bin@d8d62a881b54
369 uploading bin@d8d62a881b54
299 D8008 - created - d8d62a881b54: modify binary
370 D8008 - created - d8d62a881b54: modify binary
300 D8009 - created - af55645b2e29: remove binary
371 D8009 - created - af55645b2e29: remove binary
301 new commits: ['b8139fbb4a57']
372 new commits: ['b8139fbb4a57']
302 new commits: ['c88ce4c2d2ad']
373 new commits: ['c88ce4c2d2ad']
303 new commits: ['75dbbc901145']
374 new commits: ['75dbbc901145']
304 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
375 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
305
376
306 Phabsend a renamed binary and a copied binary, with and without content changes
377 Phabsend a renamed binary and a copied binary, with and without content changes
307 to src and dest
378 to src and dest
308
379
309 >>> open('bin2', 'wb').write(b'\0c') and None
380 >>> open('bin2', 'wb').write(b'\0c') and None
310 $ hg ci -Am 'add another binary'
381 $ hg ci -Am 'add another binary'
311 adding bin2
382 adding bin2
312
383
313 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
384 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
314 looks much different than when viewing "bin2_moved". No idea if this is a phab
385 looks much different than when viewing "bin2_moved". No idea if this is a phab
315 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
386 bug, or phabsend bug. The patch (as printed by phabread) look reasonable
316 though.
387 though.
317
388
318 $ hg mv bin2 bin2_moved
389 $ hg mv bin2 bin2_moved
319 $ hg ci -m "moved binary"
390 $ hg ci -m "moved binary"
320
391
321 Note: "bin2_moved" is also not viewable in phabricator with this review
392 Note: "bin2_moved" is also not viewable in phabricator with this review
322
393
323 $ hg cp bin2_moved bin2_copied
394 $ hg cp bin2_moved bin2_copied
324 $ hg ci -m "copied binary"
395 $ hg ci -m "copied binary"
325
396
326 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
397 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
327 are viewable in their proper state. "bin2_copied" is not viewable, and not
398 are viewable in their proper state. "bin2_copied" is not viewable, and not
328 listed as binary in phabricator.
399 listed as binary in phabricator.
329
400
330 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
401 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
331 $ hg mv bin2_copied bin2_moved_again
402 $ hg mv bin2_copied bin2_moved_again
332 $ hg ci -m "move+mod copied binary"
403 $ hg ci -m "move+mod copied binary"
333
404
334 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
405 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
335 viewable on each side.
406 viewable on each side.
336
407
337 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
408 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
338 $ hg cp bin2_moved bin2_moved_copied
409 $ hg cp bin2_moved bin2_moved_copied
339 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
410 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
340 $ hg ci -m "copy+mod moved binary"
411 $ hg ci -m "copy+mod moved binary"
341
412
342 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
413 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
343 uploading bin2@f42f9195e00c
414 uploading bin2@f42f9195e00c
344 D8128 - created - f42f9195e00c: add another binary
415 D8128 - created - f42f9195e00c: add another binary
345 D8129 - created - 834ab31d80ae: moved binary
416 D8129 - created - 834ab31d80ae: moved binary
346 D8130 - created - 494b750e5194: copied binary
417 D8130 - created - 494b750e5194: copied binary
347 uploading bin2_moved_again@25f766b50cc2
418 uploading bin2_moved_again@25f766b50cc2
348 D8131 - created - 25f766b50cc2: move+mod copied binary
419 D8131 - created - 25f766b50cc2: move+mod copied binary
349 uploading bin2_moved_copied@1b87b363a5e4
420 uploading bin2_moved_copied@1b87b363a5e4
350 uploading bin2_moved@1b87b363a5e4
421 uploading bin2_moved@1b87b363a5e4
351 D8132 - created - 1b87b363a5e4: copy+mod moved binary
422 D8132 - created - 1b87b363a5e4: copy+mod moved binary
352 new commits: ['90437c20312a']
423 new commits: ['90437c20312a']
353 new commits: ['f391f4da4c61']
424 new commits: ['f391f4da4c61']
354 new commits: ['da86a9f3268c']
425 new commits: ['da86a9f3268c']
355 new commits: ['003ffc16ba66']
426 new commits: ['003ffc16ba66']
356 new commits: ['13bd750c36fa']
427 new commits: ['13bd750c36fa']
357 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
428 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
358
429
359 Phabreading a DREV with a local:commits time as a string:
430 Phabreading a DREV with a local:commits time as a string:
360 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
431 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
361 # HG changeset patch
432 # HG changeset patch
362 # User Pulkit Goyal <7895pulkit@gmail.com>
433 # User Pulkit Goyal <7895pulkit@gmail.com>
363 # Date 1509404054 -19800
434 # Date 1509404054 -19800
364 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
435 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
365 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
436 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
366 repoview: add a new attribute _visibilityexceptions and related API
437 repoview: add a new attribute _visibilityexceptions and related API
367
438
368 Currently we don't have a defined way in core to make some hidden revisions
439 Currently we don't have a defined way in core to make some hidden revisions
369 visible in filtered repo. Extensions to achieve the purpose of unhiding some
440 visible in filtered repo. Extensions to achieve the purpose of unhiding some
370 hidden commits, wrap repoview.pinnedrevs() function.
441 hidden commits, wrap repoview.pinnedrevs() function.
371
442
372 To make the above task simple and have well defined API, this patch adds a new
443 To make the above task simple and have well defined API, this patch adds a new
373 attribute '_visibilityexceptions' to repoview class which will contains
444 attribute '_visibilityexceptions' to repoview class which will contains
374 the hidden revs which should be exception.
445 the hidden revs which should be exception.
375 This will allow to set different exceptions for different repoview objects
446 This will allow to set different exceptions for different repoview objects
376 backed by the same unfiltered repo.
447 backed by the same unfiltered repo.
377
448
378 This patch also adds API to add revs to the attribute set and get them.
449 This patch also adds API to add revs to the attribute set and get them.
379
450
380 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
451 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
381
452
382 Differential Revision: https://phab.mercurial-scm.org/D1285
453 Differential Revision: https://phab.mercurial-scm.org/D1285
383 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
454 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
384 --- a/mercurial/repoview.py
455 --- a/mercurial/repoview.py
385 +++ b/mercurial/repoview.py
456 +++ b/mercurial/repoview.py
386 @@ * @@ (glob)
457 @@ * @@ (glob)
387 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
458 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
388 """
459 """
389
460
390 + # hidden revs which should be visible
461 + # hidden revs which should be visible
391 + _visibilityexceptions = set()
462 + _visibilityexceptions = set()
392 +
463 +
393 def __init__(self, repo, filtername):
464 def __init__(self, repo, filtername):
394 object.__setattr__(self, r'_unfilteredrepo', repo)
465 object.__setattr__(self, r'_unfilteredrepo', repo)
395 object.__setattr__(self, r'filtername', filtername)
466 object.__setattr__(self, r'filtername', filtername)
396 @@ -231,6 +234,14 @@
467 @@ -231,6 +234,14 @@
397 return self
468 return self
398 return self.unfiltered().filtered(name)
469 return self.unfiltered().filtered(name)
399
470
400 + def addvisibilityexceptions(self, revs):
471 + def addvisibilityexceptions(self, revs):
401 + """adds hidden revs which should be visible to set of exceptions"""
472 + """adds hidden revs which should be visible to set of exceptions"""
402 + self._visibilityexceptions.update(revs)
473 + self._visibilityexceptions.update(revs)
403 +
474 +
404 + def getvisibilityexceptions(self):
475 + def getvisibilityexceptions(self):
405 + """returns the set of hidden revs which should be visible"""
476 + """returns the set of hidden revs which should be visible"""
406 + return self._visibilityexceptions
477 + return self._visibilityexceptions
407 +
478 +
408 # everything access are forwarded to the proxied repo
479 # everything access are forwarded to the proxied repo
409 def __getattr__(self, attr):
480 def __getattr__(self, attr):
410 return getattr(self._unfilteredrepo, attr)
481 return getattr(self._unfilteredrepo, attr)
411 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
482 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
412 --- a/mercurial/localrepo.py
483 --- a/mercurial/localrepo.py
413 +++ b/mercurial/localrepo.py
484 +++ b/mercurial/localrepo.py
414 @@ -570,6 +570,14 @@
485 @@ -570,6 +570,14 @@
415 def close(self):
486 def close(self):
416 self._writecaches()
487 self._writecaches()
417
488
418 + def addvisibilityexceptions(self, exceptions):
489 + def addvisibilityexceptions(self, exceptions):
419 + # should be called on a filtered repository
490 + # should be called on a filtered repository
420 + pass
491 + pass
421 +
492 +
422 + def getvisibilityexceptions(self):
493 + def getvisibilityexceptions(self):
423 + # should be called on a filtered repository
494 + # should be called on a filtered repository
424 + return set()
495 + return set()
425 +
496 +
426 def _loadextensions(self):
497 def _loadextensions(self):
427 extensions.loadall(self.ui)
498 extensions.loadall(self.ui)
428
499
429
500
430 A bad .arcconfig doesn't error out
501 A bad .arcconfig doesn't error out
431 $ echo 'garbage' > .arcconfig
502 $ echo 'garbage' > .arcconfig
432 $ hg config phabricator --debug
503 $ hg config phabricator --debug
433 invalid JSON in $TESTTMP/repo/.arcconfig
504 invalid JSON in $TESTTMP/repo/.arcconfig
434 read config from: */.hgrc (glob)
505 read config from: */.hgrc (glob)
435 */.hgrc:*: phabricator.debug=True (glob)
506 */.hgrc:*: phabricator.debug=True (glob)
436 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
507 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
437 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
508 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
438
509
439 The .arcconfig content overrides global config
510 The .arcconfig content overrides global config
440 $ cat >> $HGRCPATH << EOF
511 $ cat >> $HGRCPATH << EOF
441 > [phabricator]
512 > [phabricator]
442 > url = global
513 > url = global
443 > callsign = global
514 > callsign = global
444 > EOF
515 > EOF
445 $ cp $TESTDIR/../.arcconfig .
516 $ cp $TESTDIR/../.arcconfig .
446 $ mv .hg/hgrc .hg/hgrc.bak
517 $ mv .hg/hgrc .hg/hgrc.bak
447 $ hg config phabricator --debug
518 $ hg config phabricator --debug
448 read config from: */.hgrc (glob)
519 read config from: */.hgrc (glob)
449 */.hgrc:*: phabricator.debug=True (glob)
520 */.hgrc:*: phabricator.debug=True (glob)
450 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
521 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
451 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
522 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
452
523
453 But it doesn't override local config
524 But it doesn't override local config
454 $ cat >> .hg/hgrc << EOF
525 $ cat >> .hg/hgrc << EOF
455 > [phabricator]
526 > [phabricator]
456 > url = local
527 > url = local
457 > callsign = local
528 > callsign = local
458 > EOF
529 > EOF
459 $ hg config phabricator --debug
530 $ hg config phabricator --debug
460 read config from: */.hgrc (glob)
531 read config from: */.hgrc (glob)
461 */.hgrc:*: phabricator.debug=True (glob)
532 */.hgrc:*: phabricator.debug=True (glob)
462 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
533 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
463 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
534 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
464 $ mv .hg/hgrc.bak .hg/hgrc
535 $ mv .hg/hgrc.bak .hg/hgrc
465
536
466 Phabimport works with a stack
537 Phabimport works with a stack
467
538
468 $ cd ..
539 $ cd ..
469 $ hg clone repo repo2 -qr 1
540 $ hg clone repo repo2 -qr 1
470 $ cp repo/.hg/hgrc repo2/.hg/
541 $ cp repo/.hg/hgrc repo2/.hg/
471 $ cd repo2
542 $ cd repo2
472 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
543 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
473 applying patch from D7917
544 applying patch from D7917
474 applying patch from D7918
545 applying patch from D7918
475 $ hg log -r .: -G -Tcompact
546 $ hg log -r .: -G -Tcompact
476 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
547 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
477 | create draft change for phabricator testing
548 | create draft change for phabricator testing
478 |
549 |
479 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
550 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
480 | create public change for phabricator testing
551 | create public change for phabricator testing
481 |
552 |
482 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
553 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
483 | create beta for phabricator test
554 | create beta for phabricator test
484 ~
555 ~
485 Phabimport can create secret commits
556 Phabimport can create secret commits
486
557
487 $ hg rollback --config ui.rollback=True
558 $ hg rollback --config ui.rollback=True
488 repository tip rolled back to revision 1 (undo phabimport)
559 repository tip rolled back to revision 1 (undo phabimport)
489 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
560 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
490 > --config phabimport.secret=True
561 > --config phabimport.secret=True
491 applying patch from D7917
562 applying patch from D7917
492 applying patch from D7918
563 applying patch from D7918
493 $ hg log -r 'reverse(.:)' -T phases
564 $ hg log -r 'reverse(.:)' -T phases
494 changeset: 3:aaef04066140
565 changeset: 3:aaef04066140
495 tag: tip
566 tag: tip
496 phase: secret
567 phase: secret
497 user: test
568 user: test
498 date: Thu Jan 01 00:00:00 1970 +0000
569 date: Thu Jan 01 00:00:00 1970 +0000
499 summary: create draft change for phabricator testing
570 summary: create draft change for phabricator testing
500
571
501 changeset: 2:8de3712202d1
572 changeset: 2:8de3712202d1
502 phase: secret
573 phase: secret
503 user: test
574 user: test
504 date: Thu Jan 01 00:00:00 1970 +0000
575 date: Thu Jan 01 00:00:00 1970 +0000
505 summary: create public change for phabricator testing
576 summary: create public change for phabricator testing
506
577
507 changeset: 1:a692622e6937
578 changeset: 1:a692622e6937
508 phase: public
579 phase: public
509 user: test
580 user: test
510 date: Thu Jan 01 00:00:00 1970 +0000
581 date: Thu Jan 01 00:00:00 1970 +0000
511 summary: create beta for phabricator test
582 summary: create beta for phabricator test
512
583
513 Phabimport accepts multiple DREVSPECs
584 Phabimport accepts multiple DREVSPECs
514
585
515 $ hg rollback --config ui.rollback=True
586 $ hg rollback --config ui.rollback=True
516 repository tip rolled back to revision 1 (undo phabimport)
587 repository tip rolled back to revision 1 (undo phabimport)
517 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
588 $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
518 applying patch from D7917
589 applying patch from D7917
519 applying patch from D7918
590 applying patch from D7918
520
591
521 Validate arguments with --fold
592 Validate arguments with --fold
522
593
523 $ hg phabsend --fold -r 1
594 $ hg phabsend --fold -r 1
524 abort: cannot fold a single revision
595 abort: cannot fold a single revision
525 [255]
596 [255]
526 $ hg phabsend --fold --no-amend -r 1::
597 $ hg phabsend --fold --no-amend -r 1::
527 abort: cannot fold with --no-amend
598 abort: cannot fold with --no-amend
528 [255]
599 [255]
529 $ hg phabsend --fold -r 0+3
600 $ hg phabsend --fold -r 0+3
530 abort: cannot fold non-linear revisions
601 abort: cannot fold non-linear revisions
531 [255]
602 [255]
532 $ hg phabsend --fold -r 1::
603 $ hg phabsend --fold -r 1::
533 abort: cannot fold revisions with different DREV values
604 abort: cannot fold revisions with different DREV values
534 [255]
605 [255]
535
606
536 Setup a series of commits to be folded, and include the Test Plan field multiple
607 Setup a series of commits to be folded, and include the Test Plan field multiple
537 times to test the concatenation logic. No Test Plan field in the last one to
608 times to test the concatenation logic. No Test Plan field in the last one to
538 ensure missing fields are skipped.
609 ensure missing fields are skipped.
539
610
540 $ hg init ../folded
611 $ hg init ../folded
541 $ cd ../folded
612 $ cd ../folded
542 $ cat >> .hg/hgrc <<EOF
613 $ cat >> .hg/hgrc <<EOF
543 > [phabricator]
614 > [phabricator]
544 > url = https://phab.mercurial-scm.org/
615 > url = https://phab.mercurial-scm.org/
545 > callsign = HG
616 > callsign = HG
546 > EOF
617 > EOF
547
618
548 $ echo 'added' > file.txt
619 $ echo 'added' > file.txt
549 $ hg ci -Aqm 'added file'
620 $ hg ci -Aqm 'added file'
550
621
551 $ cat > log.txt <<EOF
622 $ cat > log.txt <<EOF
552 > one: first commit to review
623 > one: first commit to review
553 >
624 >
554 > This file was modified with 'mod1' as its contents.
625 > This file was modified with 'mod1' as its contents.
555 >
626 >
556 > Test Plan:
627 > Test Plan:
557 > LOL! What testing?!
628 > LOL! What testing?!
558 > EOF
629 > EOF
559 $ echo mod1 > file.txt
630 $ echo mod1 > file.txt
560 $ hg ci -l log.txt
631 $ hg ci -l log.txt
561
632
562 $ cat > log.txt <<EOF
633 $ cat > log.txt <<EOF
563 > two: second commit to review
634 > two: second commit to review
564 >
635 >
565 > This file was modified with 'mod2' as its contents.
636 > This file was modified with 'mod2' as its contents.
566 >
637 >
567 > Test Plan:
638 > Test Plan:
568 > Haha! yeah, right.
639 > Haha! yeah, right.
569 >
640 >
570 > EOF
641 > EOF
571 $ echo mod2 > file.txt
642 $ echo mod2 > file.txt
572 $ hg ci -l log.txt
643 $ hg ci -l log.txt
573
644
574 $ echo mod3 > file.txt
645 $ echo mod3 > file.txt
575 $ hg ci -m '3: a commit with no detailed message'
646 $ hg ci -m '3: a commit with no detailed message'
576
647
577 The folding of immutable commits works...
648 The folding of immutable commits works...
578
649
579 $ hg phase -r tip --public
650 $ hg phase -r tip --public
580 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
651 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
581 D8386 - created - a959a3f69d8d: one: first commit to review
652 D8386 - created - a959a3f69d8d: one: first commit to review
582 D8386 - created - 24a4438154ba: two: second commit to review
653 D8386 - created - 24a4438154ba: two: second commit to review
583 D8386 - created - d235829e802c: 3: a commit with no detailed message
654 D8386 - created - d235829e802c: 3: a commit with no detailed message
584 warning: not updating public commit 1:a959a3f69d8d
655 warning: not updating public commit 1:a959a3f69d8d
585 warning: not updating public commit 2:24a4438154ba
656 warning: not updating public commit 2:24a4438154ba
586 warning: not updating public commit 3:d235829e802c
657 warning: not updating public commit 3:d235829e802c
587 no newnodes to update
658 no newnodes to update
588
659
589 $ hg phase -r 0 --draft --force
660 $ hg phase -r 0 --draft --force
590
661
591 ... as does the initial mutable fold...
662 ... as does the initial mutable fold...
592
663
593 $ echo y | hg phabsend --fold --confirm -r 1:: \
664 $ echo y | hg phabsend --fold --confirm -r 1:: \
594 > --test-vcr "$VCR/phabsend-fold-initial.json"
665 > --test-vcr "$VCR/phabsend-fold-initial.json"
595 NEW - a959a3f69d8d: one: first commit to review
666 NEW - a959a3f69d8d: one: first commit to review
596 NEW - 24a4438154ba: two: second commit to review
667 NEW - 24a4438154ba: two: second commit to review
597 NEW - d235829e802c: 3: a commit with no detailed message
668 NEW - d235829e802c: 3: a commit with no detailed message
598 Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
669 Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
599 D8387 - created - a959a3f69d8d: one: first commit to review
670 D8387 - created - a959a3f69d8d: one: first commit to review
600 D8387 - created - 24a4438154ba: two: second commit to review
671 D8387 - created - 24a4438154ba: two: second commit to review
601 D8387 - created - d235829e802c: 3: a commit with no detailed message
672 D8387 - created - d235829e802c: 3: a commit with no detailed message
602 updating local commit list for D8387
673 updating local commit list for D8387
603 new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
674 new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
604 saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg
675 saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg
605
676
606 ... and doesn't mangle the local commits.
677 ... and doesn't mangle the local commits.
607
678
608 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
679 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
609 3:921f8265efbd
680 3:921f8265efbd
610 3: a commit with no detailed message
681 3: a commit with no detailed message
611
682
612 Differential Revision: https://phab.mercurial-scm.org/D8387
683 Differential Revision: https://phab.mercurial-scm.org/D8387
613 2:832553266fe8
684 2:832553266fe8
614 two: second commit to review
685 two: second commit to review
615
686
616 This file was modified with 'mod2' as its contents.
687 This file was modified with 'mod2' as its contents.
617
688
618 Test Plan:
689 Test Plan:
619 Haha! yeah, right.
690 Haha! yeah, right.
620
691
621 Differential Revision: https://phab.mercurial-scm.org/D8387
692 Differential Revision: https://phab.mercurial-scm.org/D8387
622 1:602c4e738243
693 1:602c4e738243
623 one: first commit to review
694 one: first commit to review
624
695
625 This file was modified with 'mod1' as its contents.
696 This file was modified with 'mod1' as its contents.
626
697
627 Test Plan:
698 Test Plan:
628 LOL! What testing?!
699 LOL! What testing?!
629
700
630 Differential Revision: https://phab.mercurial-scm.org/D8387
701 Differential Revision: https://phab.mercurial-scm.org/D8387
631 0:98d480e0d494
702 0:98d480e0d494
632 added file
703 added file
633
704
634 Setup some obsmarkers by adding a file to the middle commit. This stress tests
705 Setup some obsmarkers by adding a file to the middle commit. This stress tests
635 getoldnodedrevmap() in later phabsends.
706 getoldnodedrevmap() in later phabsends.
636
707
637 $ hg up '.^'
708 $ hg up '.^'
638 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
709 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
639 $ echo 'modified' > file2.txt
710 $ echo 'modified' > file2.txt
640 $ hg add file2.txt
711 $ hg add file2.txt
641 $ hg amend --config experimental.evolution=all --config extensions.amend=
712 $ hg amend --config experimental.evolution=all --config extensions.amend=
642 1 new orphan changesets
713 1 new orphan changesets
643 $ hg up 3
714 $ hg up 3
644 obsolete feature not enabled but 1 markers found!
715 obsolete feature not enabled but 1 markers found!
645 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
716 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
646 $ hg rebase --config experimental.evolution=all --config extensions.rebase=
717 $ hg rebase --config experimental.evolution=all --config extensions.rebase=
647 note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
718 note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
648 rebasing 3:921f8265efbd "3: a commit with no detailed message"
719 rebasing 3:921f8265efbd "3: a commit with no detailed message"
649
720
650 When commits have changed locally, the local commit list on Phabricator is
721 When commits have changed locally, the local commit list on Phabricator is
651 updated.
722 updated.
652
723
653 $ echo y | hg phabsend --fold --confirm -r 1:: \
724 $ echo y | hg phabsend --fold --confirm -r 1:: \
654 > --test-vcr "$VCR/phabsend-fold-updated.json"
725 > --test-vcr "$VCR/phabsend-fold-updated.json"
655 obsolete feature not enabled but 2 markers found!
726 obsolete feature not enabled but 2 markers found!
656 602c4e738243 mapped to old nodes ['602c4e738243']
727 602c4e738243 mapped to old nodes ['602c4e738243']
657 0124e5474c88 mapped to old nodes ['832553266fe8']
728 0124e5474c88 mapped to old nodes ['832553266fe8']
658 e4edb1fe3565 mapped to old nodes ['921f8265efbd']
729 e4edb1fe3565 mapped to old nodes ['921f8265efbd']
659 D8387 - 602c4e738243: one: first commit to review
730 D8387 - 602c4e738243: one: first commit to review
660 D8387 - 0124e5474c88: two: second commit to review
731 D8387 - 0124e5474c88: two: second commit to review
661 D8387 - e4edb1fe3565: 3: a commit with no detailed message
732 D8387 - e4edb1fe3565: 3: a commit with no detailed message
662 Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
733 Send the above changes to https://phab.mercurial-scm.org/ (yn)? y
663 D8387 - updated - 602c4e738243: one: first commit to review
734 D8387 - updated - 602c4e738243: one: first commit to review
664 D8387 - updated - 0124e5474c88: two: second commit to review
735 D8387 - updated - 0124e5474c88: two: second commit to review
665 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
736 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
666 obsolete feature not enabled but 2 markers found! (?)
737 obsolete feature not enabled but 2 markers found! (?)
667 updating local commit list for D8387
738 updating local commit list for D8387
668 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
739 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
669 $ hg log -Tcompact
740 $ hg log -Tcompact
670 obsolete feature not enabled but 2 markers found!
741 obsolete feature not enabled but 2 markers found!
671 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
742 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
672 3: a commit with no detailed message
743 3: a commit with no detailed message
673
744
674 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
745 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
675 two: second commit to review
746 two: second commit to review
676
747
677 1 602c4e738243 1970-01-01 00:00 +0000 test
748 1 602c4e738243 1970-01-01 00:00 +0000 test
678 one: first commit to review
749 one: first commit to review
679
750
680 0 98d480e0d494 1970-01-01 00:00 +0000 test
751 0 98d480e0d494 1970-01-01 00:00 +0000 test
681 added file
752 added file
682
753
683 When nothing has changed locally since the last phabsend, the commit list isn't
754 When nothing has changed locally since the last phabsend, the commit list isn't
684 updated, and nothing is changed locally afterward.
755 updated, and nothing is changed locally afterward.
685
756
686 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
757 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
687 obsolete feature not enabled but 2 markers found!
758 obsolete feature not enabled but 2 markers found!
688 602c4e738243 mapped to old nodes ['602c4e738243']
759 602c4e738243 mapped to old nodes ['602c4e738243']
689 0124e5474c88 mapped to old nodes ['0124e5474c88']
760 0124e5474c88 mapped to old nodes ['0124e5474c88']
690 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
761 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
691 D8387 - updated - 602c4e738243: one: first commit to review
762 D8387 - updated - 602c4e738243: one: first commit to review
692 D8387 - updated - 0124e5474c88: two: second commit to review
763 D8387 - updated - 0124e5474c88: two: second commit to review
693 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
764 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
694 obsolete feature not enabled but 2 markers found! (?)
765 obsolete feature not enabled but 2 markers found! (?)
695 local commit list for D8387 is already up-to-date
766 local commit list for D8387 is already up-to-date
696 $ hg log -Tcompact
767 $ hg log -Tcompact
697 obsolete feature not enabled but 2 markers found!
768 obsolete feature not enabled but 2 markers found!
698 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
769 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
699 3: a commit with no detailed message
770 3: a commit with no detailed message
700
771
701 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
772 4:1 0124e5474c88 1970-01-01 00:00 +0000 test
702 two: second commit to review
773 two: second commit to review
703
774
704 1 602c4e738243 1970-01-01 00:00 +0000 test
775 1 602c4e738243 1970-01-01 00:00 +0000 test
705 one: first commit to review
776 one: first commit to review
706
777
707 0 98d480e0d494 1970-01-01 00:00 +0000 test
778 0 98d480e0d494 1970-01-01 00:00 +0000 test
708 added file
779 added file
709
780
710 Fold will accept new revisions at the end...
781 Fold will accept new revisions at the end...
711
782
712 $ echo 'another mod' > file2.txt
783 $ echo 'another mod' > file2.txt
713 $ hg ci -m 'four: extend the fold range'
784 $ hg ci -m 'four: extend the fold range'
714 obsolete feature not enabled but 2 markers found!
785 obsolete feature not enabled but 2 markers found!
715 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
786 $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
716 > --config experimental.evolution=all
787 > --config experimental.evolution=all
717 602c4e738243 mapped to old nodes ['602c4e738243']
788 602c4e738243 mapped to old nodes ['602c4e738243']
718 0124e5474c88 mapped to old nodes ['0124e5474c88']
789 0124e5474c88 mapped to old nodes ['0124e5474c88']
719 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
790 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
720 D8387 - updated - 602c4e738243: one: first commit to review
791 D8387 - updated - 602c4e738243: one: first commit to review
721 D8387 - updated - 0124e5474c88: two: second commit to review
792 D8387 - updated - 0124e5474c88: two: second commit to review
722 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
793 D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
723 D8387 - created - 94aaae213b23: four: extend the fold range
794 D8387 - created - 94aaae213b23: four: extend the fold range
724 updating local commit list for D8387
795 updating local commit list for D8387
725 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
796 new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
726 $ hg log -r . -T '{desc}\n'
797 $ hg log -r . -T '{desc}\n'
727 four: extend the fold range
798 four: extend the fold range
728
799
729 Differential Revision: https://phab.mercurial-scm.org/D8387
800 Differential Revision: https://phab.mercurial-scm.org/D8387
730 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
801 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
731 obsolete feature not enabled but 3 markers found!
802 obsolete feature not enabled but 3 markers found!
732 1 https://phab.mercurial-scm.org/D8387 D8387
803 1 https://phab.mercurial-scm.org/D8387 D8387
733 4 https://phab.mercurial-scm.org/D8387 D8387
804 4 https://phab.mercurial-scm.org/D8387 D8387
734 5 https://phab.mercurial-scm.org/D8387 D8387
805 5 https://phab.mercurial-scm.org/D8387 D8387
735 7 https://phab.mercurial-scm.org/D8387 D8387
806 7 https://phab.mercurial-scm.org/D8387 D8387
736
807
737 ... and also accepts new revisions at the beginning of the range
808 ... and also accepts new revisions at the beginning of the range
738
809
739 It's a bit unfortunate that not having a Differential URL on the first commit
810 It's a bit unfortunate that not having a Differential URL on the first commit
740 causes a new Differential Revision to be created, though it isn't *entirely*
811 causes a new Differential Revision to be created, though it isn't *entirely*
741 unreasonable. At least this updates the subsequent commits.
812 unreasonable. At least this updates the subsequent commits.
742
813
743 TODO: See if it can reuse the existing Differential.
814 TODO: See if it can reuse the existing Differential.
744
815
745 $ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
816 $ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
746 > --config experimental.evolution=all
817 > --config experimental.evolution=all
747 602c4e738243 mapped to old nodes ['602c4e738243']
818 602c4e738243 mapped to old nodes ['602c4e738243']
748 0124e5474c88 mapped to old nodes ['0124e5474c88']
819 0124e5474c88 mapped to old nodes ['0124e5474c88']
749 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
820 e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
750 51a04fea8707 mapped to old nodes ['51a04fea8707']
821 51a04fea8707 mapped to old nodes ['51a04fea8707']
751 D8388 - created - 98d480e0d494: added file
822 D8388 - created - 98d480e0d494: added file
752 D8388 - updated - 602c4e738243: one: first commit to review
823 D8388 - updated - 602c4e738243: one: first commit to review
753 D8388 - updated - 0124e5474c88: two: second commit to review
824 D8388 - updated - 0124e5474c88: two: second commit to review
754 D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
825 D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
755 D8388 - updated - 51a04fea8707: four: extend the fold range
826 D8388 - updated - 51a04fea8707: four: extend the fold range
756 updating local commit list for D8388
827 updating local commit list for D8388
757 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']
828 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']
758
829
759 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
830 $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
760 obsolete feature not enabled but 8 markers found!
831 obsolete feature not enabled but 8 markers found!
761 12:ac7db67f0991
832 12:ac7db67f0991
762 four: extend the fold range
833 four: extend the fold range
763
834
764 Differential Revision: https://phab.mercurial-scm.org/D8388
835 Differential Revision: https://phab.mercurial-scm.org/D8388
765 11:30682b960804
836 11:30682b960804
766 3: a commit with no detailed message
837 3: a commit with no detailed message
767
838
768 Differential Revision: https://phab.mercurial-scm.org/D8388
839 Differential Revision: https://phab.mercurial-scm.org/D8388
769 10:3ee132d41dbc
840 10:3ee132d41dbc
770 two: second commit to review
841 two: second commit to review
771
842
772 This file was modified with 'mod2' as its contents.
843 This file was modified with 'mod2' as its contents.
773
844
774 Test Plan:
845 Test Plan:
775 Haha! yeah, right.
846 Haha! yeah, right.
776
847
777 Differential Revision: https://phab.mercurial-scm.org/D8388
848 Differential Revision: https://phab.mercurial-scm.org/D8388
778 9:6320b7d714cf
849 9:6320b7d714cf
779 one: first commit to review
850 one: first commit to review
780
851
781 This file was modified with 'mod1' as its contents.
852 This file was modified with 'mod1' as its contents.
782
853
783 Test Plan:
854 Test Plan:
784 LOL! What testing?!
855 LOL! What testing?!
785
856
786 Differential Revision: https://phab.mercurial-scm.org/D8388
857 Differential Revision: https://phab.mercurial-scm.org/D8388
787 8:15e9b14b4b4c
858 8:15e9b14b4b4c
788 added file
859 added file
789
860
790 Differential Revision: https://phab.mercurial-scm.org/D8388
861 Differential Revision: https://phab.mercurial-scm.org/D8388
791
862
792 Test phabsend --fold with an `hg split` at the end of the range
863 Test phabsend --fold with an `hg split` at the end of the range
793
864
794 $ echo foo > file3.txt
865 $ echo foo > file3.txt
795 $ hg add file3.txt
866 $ hg add file3.txt
796
867
797 $ hg log -r . -T '{desc}' > log.txt
868 $ hg log -r . -T '{desc}' > log.txt
798 $ echo 'amended mod' > file2.txt
869 $ echo 'amended mod' > file2.txt
799 $ hg ci --amend -l log.txt --config experimental.evolution=all
870 $ hg ci --amend -l log.txt --config experimental.evolution=all
800
871
801 $ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
872 $ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
802 > --config experimental.evolution=all split -r .
873 > --config experimental.evolution=all split -r .
803 > n
874 > n
804 > y
875 > y
805 > y
876 > y
806 > y
877 > y
807 > y
878 > y
808 > EOF
879 > EOF
809 diff --git a/file2.txt b/file2.txt
880 diff --git a/file2.txt b/file2.txt
810 1 hunks, 1 lines changed
881 1 hunks, 1 lines changed
811 examine changes to 'file2.txt'?
882 examine changes to 'file2.txt'?
812 (enter ? for help) [Ynesfdaq?] n
883 (enter ? for help) [Ynesfdaq?] n
813
884
814 diff --git a/file3.txt b/file3.txt
885 diff --git a/file3.txt b/file3.txt
815 new file mode 100644
886 new file mode 100644
816 examine changes to 'file3.txt'?
887 examine changes to 'file3.txt'?
817 (enter ? for help) [Ynesfdaq?] y
888 (enter ? for help) [Ynesfdaq?] y
818
889
819 @@ -0,0 +1,1 @@
890 @@ -0,0 +1,1 @@
820 +foo
891 +foo
821 record change 2/2 to 'file3.txt'?
892 record change 2/2 to 'file3.txt'?
822 (enter ? for help) [Ynesfdaq?] y
893 (enter ? for help) [Ynesfdaq?] y
823
894
824 created new head
895 created new head
825 diff --git a/file2.txt b/file2.txt
896 diff --git a/file2.txt b/file2.txt
826 1 hunks, 1 lines changed
897 1 hunks, 1 lines changed
827 examine changes to 'file2.txt'?
898 examine changes to 'file2.txt'?
828 (enter ? for help) [Ynesfdaq?] y
899 (enter ? for help) [Ynesfdaq?] y
829
900
830 @@ -1,1 +1,1 @@
901 @@ -1,1 +1,1 @@
831 -modified
902 -modified
832 +amended mod
903 +amended mod
833 record this change to 'file2.txt'?
904 record this change to 'file2.txt'?
834 (enter ? for help) [Ynesfdaq?] y
905 (enter ? for help) [Ynesfdaq?] y
835
906
836 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
907 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
837 > --config experimental.evolution=all
908 > --config experimental.evolution=all
838 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
909 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
839 6320b7d714cf mapped to old nodes ['6320b7d714cf']
910 6320b7d714cf mapped to old nodes ['6320b7d714cf']
840 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
911 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
841 30682b960804 mapped to old nodes ['30682b960804']
912 30682b960804 mapped to old nodes ['30682b960804']
842 6bc15dc99efd mapped to old nodes ['ac7db67f0991']
913 6bc15dc99efd mapped to old nodes ['ac7db67f0991']
843 b50946d5e490 mapped to old nodes ['ac7db67f0991']
914 b50946d5e490 mapped to old nodes ['ac7db67f0991']
844 D8388 - updated - 15e9b14b4b4c: added file
915 D8388 - updated - 15e9b14b4b4c: added file
845 D8388 - updated - 6320b7d714cf: one: first commit to review
916 D8388 - updated - 6320b7d714cf: one: first commit to review
846 D8388 - updated - 3ee132d41dbc: two: second commit to review
917 D8388 - updated - 3ee132d41dbc: two: second commit to review
847 D8388 - updated - 30682b960804: 3: a commit with no detailed message
918 D8388 - updated - 30682b960804: 3: a commit with no detailed message
848 D8388 - updated - 6bc15dc99efd: four: extend the fold range
919 D8388 - updated - 6bc15dc99efd: four: extend the fold range
849 D8388 - updated - b50946d5e490: four: extend the fold range
920 D8388 - updated - b50946d5e490: four: extend the fold range
850 updating local commit list for D8388
921 updating local commit list for D8388
851 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']
922 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']
852
923
853 Test phabsend --fold with an `hg fold` at the end of the range
924 Test phabsend --fold with an `hg fold` at the end of the range
854
925
855 $ hg --config experimental.evolution=all --config extensions.rebase= \
926 $ hg --config experimental.evolution=all --config extensions.rebase= \
856 > rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
927 > rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
857 rebasing 14:6bc15dc99efd "four: extend the fold range"
928 rebasing 14:6bc15dc99efd "four: extend the fold range"
858 rebasing 15:b50946d5e490 "four: extend the fold range" (tip)
929 rebasing 15:b50946d5e490 "four: extend the fold range" (tip)
859
930
860 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
931 $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
861 > --config experimental.evolution=all
932 > --config experimental.evolution=all
862 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
933 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
863 6320b7d714cf mapped to old nodes ['6320b7d714cf']
934 6320b7d714cf mapped to old nodes ['6320b7d714cf']
864 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
935 3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
865 30682b960804 mapped to old nodes ['30682b960804']
936 30682b960804 mapped to old nodes ['30682b960804']
866 e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
937 e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
867 D8388 - updated - 15e9b14b4b4c: added file
938 D8388 - updated - 15e9b14b4b4c: added file
868 D8388 - updated - 6320b7d714cf: one: first commit to review
939 D8388 - updated - 6320b7d714cf: one: first commit to review
869 D8388 - updated - 3ee132d41dbc: two: second commit to review
940 D8388 - updated - 3ee132d41dbc: two: second commit to review
870 D8388 - updated - 30682b960804: 3: a commit with no detailed message
941 D8388 - updated - 30682b960804: 3: a commit with no detailed message
871 D8388 - updated - e919cdf3d4fe: four: extend the fold range
942 D8388 - updated - e919cdf3d4fe: four: extend the fold range
872 updating local commit list for D8388
943 updating local commit list for D8388
873 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']
944 new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']
874
945
875 $ hg log -r tip -v
946 $ hg log -r tip -v
876 obsolete feature not enabled but 12 markers found!
947 obsolete feature not enabled but 12 markers found!
877 changeset: 16:e919cdf3d4fe
948 changeset: 16:e919cdf3d4fe
878 tag: tip
949 tag: tip
879 parent: 11:30682b960804
950 parent: 11:30682b960804
880 user: test
951 user: test
881 date: Thu Jan 01 00:00:00 1970 +0000
952 date: Thu Jan 01 00:00:00 1970 +0000
882 files: file2.txt file3.txt
953 files: file2.txt file3.txt
883 description:
954 description:
884 four: extend the fold range
955 four: extend the fold range
885
956
886 Differential Revision: https://phab.mercurial-scm.org/D8388
957 Differential Revision: https://phab.mercurial-scm.org/D8388
887
958
888
959
889
960
890 $ cd ..
961 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now