phabricator: add a `phabimport` command...
Matt Harbison
r45039:bbb170f9 default
@@ -0,0 +1,277 b''
1 {
2 "version": 1,
3 "interactions": [
4 {
5 "request": {
6 "body": "__conduit__=1&output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B7906%2C+7907%2C+7908%2C+7909%2C+7910%2C+7911%2C+7912%2C+7913%2C+7914%2C+7915%2C+7916%2C+7917%2C+7918%5D%7D",
7 "uri": "https://phab.mercurial-scm.org//api/differential.query",
8 "headers": {
9 "content-length": [
10 "242"
11 ],
12 "host": [
13 "phab.mercurial-scm.org"
14 ],
15 "accept": [
16 "application/mercurial-0.1"
17 ],
18 "user-agent": [
19 "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
20 ],
21 "content-type": [
22 "application/x-www-form-urlencoded"
23 ]
24 },
25 "method": "POST"
26 },
27 "response": {
28 "status": {
29 "message": "OK",
30 "code": 200
31 },
32 "headers": {
33 "expires": [
34 "Sat, 01 Jan 2000 00:00:00 GMT"
35 ],
36 "transfer-encoding": [
37 "chunked"
38 ],
39 "x-content-type-options": [
40 "nosniff"
41 ],
42 "x-frame-options": [
43 "Deny"
44 ],
45 "cache-control": [
46 "no-store"
47 ],
48 "content-type": [
49 "application/json"
50 ],
51 "server": [
52 "Apache/2.4.10 (Debian)"
53 ],
54 "strict-transport-security": [
55 "max-age=0; includeSubdomains; preload"
56 ],
57 "date": [
58 "Sun, 16 Feb 2020 20:45:32 GMT"
59 ],
60 "referrer-policy": [
61 "no-referrer"
62 ],
63 "x-xss-protection": [
64 "1; mode=block"
65 ]
66 },
67 "body": {
68 "string": "{\"result\":[{\"id\":\"7914\",\"phid\":\"PHID-DREV-u3iz5rww54i5jrsksnr3\",\"title\":\"rust-matchers: implement `visit_children_set` for `FileMatcher`\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7914\",\"dateCreated\":\"1579212591\",\"dateModified\":\"1581399130\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":147,\"lines.removed\":5,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"As per the removed inline comment, this will become useful in a future patch\\nin this series as the `IncludeMatcher` is introduced.\",\"testPlan\":\"\",\"lineCount\":\"152\",\"activeDiffPHID\":\"PHID-DIFF-n6cmaq4iwcetzbkkjvje\",\"diffs\":[\"20146\",\"19388\",\"19387\"],\"commits\":[\"PHID-CMIT-zdugtywectjyslokpg45\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-rskbts6c2kyknc66vlzt\",\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-lii2vixihcpnnjss3bzp\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7907\",\"phid\":\"PHID-DREV-jjmiq6h4ychdtvqh3aqu\",\"title\":\"rebase: always be graft-like, not merge-like, also for merges\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7907\",\"dateCreated\":\"1579162215\",\"dateModified\":\"1581387772\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":37,\"lines.removed\":96,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Rebase works by updating to a commit and then grafting changes on\\ntop. However, before this patch, it would actually merge in changes\\ninstead of grafting them in in some cases. That is, it would use the\\ncommon ancestor as base instead of using one of the parents. That\\nseems wrong to me, so I'm changing it so `defineparents()` always\\nreturns a value for `base`.\\n\\nThis fixes the bad behavior in test-rebase-newancestor.t, which was\\nintroduced in 65f215ea3e8e (tests: add test for rebasing merges with\\nancestors of the rebase destination, 2014-11-30).\\n\\nThe difference in test-rebase-dest.t is because the files in the tip\\nrevision were A, D, E, F before this patch and A, D, F, G after it. 
I\\nthink both files should ideally be there.\",\"testPlan\":\"\",\"lineCount\":\"133\",\"activeDiffPHID\":\"PHID-DIFF-xo54almrs3aipnwsjrju\",\"diffs\":[\"20131\",\"20093\",\"19858\",\"19699\",\"19377\",\"19343\"],\"commits\":[\"PHID-CMIT-bflrckeubx66y5jb3h2w\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-wyjh3r4pzmjaex6k5qtv\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7910\",\"phid\":\"PHID-DREV-lii2vixihcpnnjss3bzp\",\"title\":\"rust-re2: add wrapper for calling Re2 from Rust\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7910\",\"dateCreated\":\"1579182899\",\"dateModified\":\"1581379671\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":195,\"lines.removed\":5,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"This assumes that Re2 is installed following Google's guide. I am not sure\\nhow we want to integrate it in the project, but I think a follow-up patch would\\nbe more appropriate for such work.\\nAs it stands, *not* having Re2 installed results in a compilation error, which\\nis a problem as it breaks install compatibility. Hence, this is gated behind\\na non-default `with-re2` compilation feature.\",\"testPlan\":\"\",\"lineCount\":\"200\",\"activeDiffPHID\":\"PHID-DIFF-hvxi3tvelg75fjugmca5\",\"diffs\":[\"20080\",\"20040\",\"19938\",\"19546\",\"19399\",\"19386\",\"19360\"],\"commits\":[\"PHID-CMIT-5tq5dqzc7uvuanxqr7ze\",\"PHID-CMIT-visqfpftvyutaadm73vj\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-2lpsl6btnf4lltwv7drt\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-xkbqk6xlntkrgqn4x62c\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7909\",\"phid\":\"PHID-DREV-xkbqk6xlntkrgqn4x62c\",\"title\":\"rust-filepatterns: add support for `include` and `subinclude` patterns\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7909\",\"dateCreated\":\"1579174385\",\"dateModified\":\"1581379668\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":129,\"lines.removed\":1,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"This prepares a future patch for `IncludeMatcher` on the road to bare\\n`hg status` 
support.\",\"testPlan\":\"\",\"lineCount\":\"130\",\"activeDiffPHID\":\"PHID-DIFF-rjff6a36zcgyoctyaacc\",\"diffs\":[\"20079\",\"20039\",\"19385\",\"19357\"],\"commits\":[\"PHID-CMIT-6egqfyiavkmaq3u6cy7f\",\"PHID-CMIT-5xl5pj2nijmojoenjv47\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-k74ndkbhbsjoh6vdf6ch\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7908\",\"phid\":\"PHID-DREV-k74ndkbhbsjoh6vdf6ch\",\"title\":\"rust-filepatterns: improve API and robustness for pattern files parsing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7908\",\"dateCreated\":\"1579170142\",\"dateModified\":\"1581379666\",\"authorPHID\":\"PHID-USER-7hh4j4mpuwlnzvkapvse\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":245,\"lines.removed\":65,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Within the next few patches we will be using this new API.\",\"testPlan\":\"\",\"lineCount\":\"310\",\"activeDiffPHID\":\"PHID-DIFF-e7c77er3c45mjtkuzmr4\",\"diffs\":[\"20078\",\"20038\",\"19384\",\"19356\",\"19355\"],\"commits\":[\"PHID-CMIT-adevfr2rleerktrzh2zw\",\"PHID-CMIT-2vgwhgqwxfn2x26thcgr\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-nqkdtlvq7nwcejrriivx\":\"PHID-USER-nqkdtlvq7nwcejrriivx\"},\"ccs\":[\"PHID-USER-cgcdlc6c3gpxapbmkwa2\",\"PHID-USER-nqkdtlvq7nwcejrriivx\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-du2y5nvrvr43bahbwaia\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7906\",\"phid\":\"PHID-DREV-wyjh3r4pzmjaex6k5qtv\",\"title\":\"rebase: define base in only place in defineparents()\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7906\",\"dateCreated\":\"1579162214\",\"dateModified\":\"1580483936\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":10,\"lines.removed\":10,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Just a little refactoring to prepare for the next 
patch.\",\"testPlan\":\"\",\"lineCount\":\"20\",\"activeDiffPHID\":\"PHID-DIFF-7ihtsunr2rq5htngocse\",\"diffs\":[\"19720\",\"19698\",\"19342\"],\"commits\":[\"PHID-CMIT-jgxpobg6eadntkxz5tpa\",\"PHID-CMIT-jpk5c6pkor7pm63ztmh5\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-34jnztnonbr4lhwuybwl\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-cknqk5y5i26nfwplj6a2\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7913\",\"phid\":\"PHID-DREV-s4borg2nl7ay2mskktwq\",\"title\":\"cext: fix compiler warning about sign changing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7913\",\"dateCreated\":\"1579207172\",\"dateModified\":\"1579709023\",\"authorPHID\":\"PHID-USER-5iutahkpkhvnxfimqjbk\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":6,\"lines.removed\":6,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"line.len is a Py_ssize_t, and we're casing to size_t (unsigned). On my compiler,\\nthis causes a warning to be emitted:\\n\\n```\\nmercurial\\/cext\\/manifest.c: In function 'pathlen':\\nmercurial\\/cext\\/manifest.c:48:44: warning: operand of ?: changes signedness from 'Py_ssize_t' {aka 'long int'} to 'long unsigned int' due to unsignedness of other operand [-Wsign-compare]\\n return (end) ? (size_t)(end - l-\\u003estart) : l-\\u003elen;\\n ^~~~~~\\n```\",\"testPlan\":\"\",\"lineCount\":\"12\",\"activeDiffPHID\":\"PHID-DIFF-otv6bgmiu242tgi62saw\",\"diffs\":[\"19406\",\"19380\"],\"commits\":[\"PHID-CMIT-z46nrlwhoumbuxp7f2hy\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-tzhaient733lwrlbcag5\":\"PHID-USER-tzhaient733lwrlbcag5\"},\"ccs\":[\"PHID-USER-qwhdxkyioew7vwvxqc2g\",\"PHID-USER-tzhaient733lwrlbcag5\",\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7911\",\"phid\":\"PHID-DREV-rjja25ytm3wz7p262cxd\",\"title\":\"examples: refer to nightly rustfmt in Windows-compatible way\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7911\",\"dateCreated\":\"1579192910\",\"dateModified\":\"1579274016\",\"authorPHID\":\"PHID-USER-rskbts6c2kyknc66vlzt\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1,\"wasAcceptedBeforeClose\":true},\"branch\":null,\"summary\":\"Thanks to Jun Wu for the tip. 
I found that the new form also gave\\nbetter error messages when the nightly rustfmt wasn't installed (it\\ntold me which command to run instead of just saying \\\"error: not a\\nfile: \\u003csome path\\u003e\\\").\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-xewewozhprr7tbym4sqx\",\"diffs\":[\"19408\",\"19376\"],\"commits\":[\"PHID-CMIT-zoorilx5m4ijcev7rp2z\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-34jnztnonbr4lhwuybwl\":\"PHID-USER-34jnztnonbr4lhwuybwl\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"},{\"id\":\"7918\",\"phid\":\"PHID-DREV-sfsckrwrwc77rdl3k5rz\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7918\",\"dateCreated\":\"1579221164\",\"dateModified\":\"1579222305\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":2},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-pqdlhei24n47fzeofjph\",\"diffs\":[\"19394\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-yhl3yvijs4jploa5iqm4\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7917\",\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7917\",\"dateCreated\":\"1579221160\",\"dateModified\":\"1579222286\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-e64weyerxtutv2jvj2dt\",\"diffs\":[\"19393\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7916\",\"phid\":\"PHID-DREV-nk73cg2l2oqfozxnw2i3\",\"title\":\"create beta for phabricator 
test\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7916\",\"dateCreated\":\"1579221145\",\"dateModified\":\"1579222261\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"1\",\"activeDiffPHID\":\"PHID-DIFF-vn5llgg5oh2rkzquipx4\",\"diffs\":[\"19392\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-3mzbavd2ajsbar5l3esr\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7915\",\"phid\":\"PHID-DREV-3mzbavd2ajsbar5l3esr\",\"title\":\"create alpha for phabricator test \\u20ac\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7915\",\"dateCreated\":\"1579221124\",\"dateModified\":\"1579222242\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":2,\"lines.removed\":0},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-fu7z4h6aahgcq2h2q33b\",\"diffs\":[\"19391\",\"19390\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7912\",\"phid\":\"PHID-DREV-6sl7k5ssqpiymujoeppg\",\"title\":\"py3: fix curses chunkselector fallback (when diffs are too large) on py3\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7912\",\"dateCreated\":\"1579206015\",\"dateModified\":\"1579211357\",\"authorPHID\":\"PHID-USER-5iutahkpkhvnxfimqjbk\",\"status\":\"3\",\"statusName\":\"Closed\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1,\"wasAcceptedBeforeClose\":false},\"branch\":null,\"summary\":\"Previously we showed the message using Exception.message, which is removed in\\npy3. Since crecordmod.fallbackerror inherits from error.Abort, we can just use\\n`b'%s' % exception` to print the message. This does not print the hint, but\\nthat's fine - we don't set one. 
We inherit from error.Abort so that if a\\ncodepath doesn't handle fallback specially, it exits to the terminal with a sane\\nmessage instead of an unknown exception error.\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-45onijfyde7kwtva3efa\",\"diffs\":[\"19381\",\"19379\"],\"commits\":[\"PHID-CMIT-i2qbhmmfpgrrkhubbr5v\"],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\"}],\"error_code\":null,\"error_info\":null}"
69 }
70 }
71 },
72 {
73 "request": {
74 "body": "__conduit__=1&output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B19393%2C+19394%5D%7D",
75 "uri": "https://phab.mercurial-scm.org//api/differential.querydiffs",
76 "headers": {
77 "content-length": [
78 "156"
79 ],
80 "host": [
81 "phab.mercurial-scm.org"
82 ],
83 "accept": [
84 "application/mercurial-0.1"
85 ],
86 "user-agent": [
87 "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
88 ],
89 "content-type": [
90 "application/x-www-form-urlencoded"
91 ]
92 },
93 "method": "POST"
94 },
95 "response": {
96 "status": {
97 "message": "OK",
98 "code": 200
99 },
100 "headers": {
101 "expires": [
102 "Sat, 01 Jan 2000 00:00:00 GMT"
103 ],
104 "transfer-encoding": [
105 "chunked"
106 ],
107 "x-content-type-options": [
108 "nosniff"
109 ],
110 "x-frame-options": [
111 "Deny"
112 ],
113 "cache-control": [
114 "no-store"
115 ],
116 "content-type": [
117 "application/json"
118 ],
119 "server": [
120 "Apache/2.4.10 (Debian)"
121 ],
122 "strict-transport-security": [
123 "max-age=0; includeSubdomains; preload"
124 ],
125 "date": [
126 "Sun, 16 Feb 2020 20:45:33 GMT"
127 ],
128 "referrer-policy": [
129 "no-referrer"
130 ],
131 "x-xss-protection": [
132 "1; mode=block"
133 ]
134 },
135 "body": {
136 "string": "{\"result\":{\"19394\":{\"id\":\"19394\",\"revisionID\":\"7918\",\"dateCreated\":\"1579221161\",\"dateModified\":\"1579221164\",\"sourceControlBaseRevision\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52927\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"sjHKTvwwqRoW\"},\"oldPath\":\"alpha\",\"currentPath\":\"alpha\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"2\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"2\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-alpha\\n-more\\n+draft change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parent\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"user\":\"test\"},\"local:commits\":{\"3244dc4a33342b4d91ad534ae091685244ac5ed4\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"3244dc4a33342b4d91ad534ae091685244ac5ed4\",\"parents\":[\"7b4185ab5d16acf98e41d566be38c5dbea10878d\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"},\"19393\":{\"id\":\"19393\",\"revisionID\":\"7917\",\"dateCreated\":\"1579221158\",\"dateModified\":\"1579221160\",\"sourceControlBaseRevision\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"sourceControlPath\":\"\\/\",\"sourceControlSystem\":\"hg\",\"branch\":\"default\",\"bookmark\":null,\"creationMethod\":\"phabsend\",\"description\":null,\"unitStatus\":\"0\",\"lintStatus\":\"0\",\"changes\":[{\"id\":\"52926\",\"metadata\":{\"line:first\":1,\"hash.effect\":\"uKa4JPWhh2di\"},\"oldPath\":\"beta\",\"currentPath\":\"beta\",\"awayPaths\":[],\"oldProperties\":[],\"newProperties\":[],\"type\":\"2\",\"fileType\":\"1\",\"commitHash\":null,\"addLines\":\"1\",\"delLines\":\"1\",\"hunks\":[{\"oldOffset\":\"1\",\"newOffset\":\"1\",\"oldLength\":\"1\",\"newLength\":\"1\",\"addLines\":null,\"delLines\":null,\"isMissingOldNewline\":null,\"isMissingNewNewline\":null,\"corpus\":\"-beta\\n+public change\\n\"}]}],\"properties\":{\"hg:meta\":{\"branch\":\"default\",\"date\":\"0 0\",\"node\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parent\":\"a692622e693757674f85ff481c7ff77057a7f82a\",\"user\":\"test\"},\"local:commits\":{\"7b4185ab5d16acf98e41d566be38c5dbea10878d\":{\"author\":\"test\",\"authorEmail\":\"test\",\"branch\":\"default\",\"commit\":\"7b4185ab5d16acf98e41d566be38c5dbea10878d\",\"parents\":[\"a692622e693757674f85ff481c7ff77057a7f82a\"],\"time\":0}}},\"authorName\":\"test\",\"authorEmail\":\"test\"}},\"error_code\":null,\"error_info\":null}"
137 }
138 }
139 },
140 {
141 "request": {
142 "body": "__conduit__=1&output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19393%7D",
143 "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
144 "headers": {
145 "content-length": [
146 "144"
147 ],
148 "host": [
149 "phab.mercurial-scm.org"
150 ],
151 "accept": [
152 "application/mercurial-0.1"
153 ],
154 "user-agent": [
155 "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
156 ],
157 "content-type": [
158 "application/x-www-form-urlencoded"
159 ]
160 },
161 "method": "POST"
162 },
163 "response": {
164 "status": {
165 "message": "OK",
166 "code": 200
167 },
168 "headers": {
169 "expires": [
170 "Sat, 01 Jan 2000 00:00:00 GMT"
171 ],
172 "transfer-encoding": [
173 "chunked"
174 ],
175 "x-content-type-options": [
176 "nosniff"
177 ],
178 "x-frame-options": [
179 "Deny"
180 ],
181 "cache-control": [
182 "no-store"
183 ],
184 "content-type": [
185 "application/json"
186 ],
187 "server": [
188 "Apache/2.4.10 (Debian)"
189 ],
190 "strict-transport-security": [
191 "max-age=0; includeSubdomains; preload"
192 ],
193 "date": [
194 "Sun, 16 Feb 2020 20:45:33 GMT"
195 ],
196 "referrer-policy": [
197 "no-referrer"
198 ],
199 "x-xss-protection": [
200 "1; mode=block"
201 ]
202 },
203 "body": {
204 "string": "{\"result\":\"diff --git a\\/beta b\\/beta\\n--- a\\/beta\\n+++ b\\/beta\\n@@ -1 +1 @@\\n-beta\\n+public change\\n\\n\",\"error_code\":null,\"error_info\":null}"
205 }
206 }
207 },
208 {
209 "request": {
210 "body": "__conduit__=1&output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22diffID%22%3A+19394%7D",
211 "uri": "https://phab.mercurial-scm.org//api/differential.getrawdiff",
212 "headers": {
213 "content-length": [
214 "144"
215 ],
216 "host": [
217 "phab.mercurial-scm.org"
218 ],
219 "accept": [
220 "application/mercurial-0.1"
221 ],
222 "user-agent": [
223 "mercurial/proto-1.0 (Mercurial 5.3+205-75107f1aa427+20200215)"
224 ],
225 "content-type": [
226 "application/x-www-form-urlencoded"
227 ]
228 },
229 "method": "POST"
230 },
231 "response": {
232 "status": {
233 "message": "OK",
234 "code": 200
235 },
236 "headers": {
237 "expires": [
238 "Sat, 01 Jan 2000 00:00:00 GMT"
239 ],
240 "transfer-encoding": [
241 "chunked"
242 ],
243 "x-content-type-options": [
244 "nosniff"
245 ],
246 "x-frame-options": [
247 "Deny"
248 ],
249 "cache-control": [
250 "no-store"
251 ],
252 "content-type": [
253 "application/json"
254 ],
255 "server": [
256 "Apache/2.4.10 (Debian)"
257 ],
258 "strict-transport-security": [
259 "max-age=0; includeSubdomains; preload"
260 ],
261 "date": [
262 "Sun, 16 Feb 2020 20:45:34 GMT"
263 ],
264 "referrer-policy": [
265 "no-referrer"
266 ],
267 "x-xss-protection": [
268 "1; mode=block"
269 ]
270 },
271 "body": {
272 "string": "{\"result\":\"diff --git a\\/alpha b\\/alpha\\n--- a\\/alpha\\n+++ b\\/alpha\\n@@ -1,2 +1 @@\\n-alpha\\n-more\\n+draft change\\n\\n\",\"error_code\":null,\"error_info\":null}"
273 }
274 }
275 }
276 ]
277 }
\ No newline at end of file
@@ -1,1830 +1,1889 b''
1 1 # phabricator.py - simple Phabricator integration
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """simple Phabricator integration (EXPERIMENTAL)
8 8
9 9 This extension provides a ``phabsend`` command which sends a stack of
10 10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 12 to update statuses in batch.
13 13
14 14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 15 information of Phabricator differentials associated with unfinished
16 16 changesets.
17 17
18 18 By default, Phabricator requires a ``Test Plan``, which might prevent some
19 19 changesets from being sent. The requirement can be disabled by changing the
20 20 ``differential.require-test-plan-field`` config server-side.
21 21
22 22 Config::
23 23
24 24 [phabricator]
25 25 # Phabricator URL
26 26 url = https://phab.example.com/
27 27
28 28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 29 # callsign is "FOO".
30 30 callsign = FOO
31 31
32 32 # curl command to use. If not set (default), use builtin HTTP library to
33 33 # communicate. If set, use the specified curl command. This could be useful
34 34 # if you need to specify advanced options that are not easily supported by
35 35 # the internal library.
36 36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37 37
38 38 [auth]
39 39 example.schemes = https
40 40 example.prefix = phab.example.com
41 41
42 42 # API token. Get it from https://$HOST/conduit/login/
43 43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 44 """
45 45
46 46 from __future__ import absolute_import
47 47
48 48 import base64
49 49 import contextlib
50 50 import hashlib
51 51 import itertools
52 52 import json
53 53 import mimetypes
54 54 import operator
55 55 import re
56 56
57 57 from mercurial.node import bin, nullid
58 58 from mercurial.i18n import _
59 59 from mercurial.pycompat import getattr
60 60 from mercurial.thirdparty import attr
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 encoding,
65 65 error,
66 66 exthelper,
67 67 graphmod,
68 68 httpconnection as httpconnectionmod,
69 69 localrepo,
70 70 logcmdutil,
71 71 match,
72 72 mdiff,
73 73 obsutil,
74 74 parser,
75 75 patch,
76 76 phases,
77 77 pycompat,
78 78 scmutil,
79 79 smartset,
80 80 tags,
81 81 templatefilters,
82 82 templateutil,
83 83 url as urlmod,
84 84 util,
85 85 )
86 86 from mercurial.utils import (
87 87 procutil,
88 88 stringutil,
89 89 )
90 90 from . import show
91 91
92 92
93 93 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 94 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 95 # be specifying the version(s) of Mercurial they are tested with, or
96 96 # leave the attribute unspecified.
97 97 testedwith = b'ships-with-hg-core'
98 98
99 99 eh = exthelper.exthelper()
100 100
101 101 cmdtable = eh.cmdtable
102 102 command = eh.command
103 103 configtable = eh.configtable
104 104 templatekeyword = eh.templatekeyword
105 105 uisetup = eh.finaluisetup
106 106
107 107 # developer config: phabricator.batchsize
108 108 eh.configitem(
109 109 b'phabricator', b'batchsize', default=12,
110 110 )
111 111 eh.configitem(
112 112 b'phabricator', b'callsign', default=None,
113 113 )
114 114 eh.configitem(
115 115 b'phabricator', b'curlcmd', default=None,
116 116 )
117 117 # developer config: phabricator.repophid
118 118 eh.configitem(
119 119 b'phabricator', b'repophid', default=None,
120 120 )
121 121 eh.configitem(
122 122 b'phabricator', b'url', default=None,
123 123 )
124 124 eh.configitem(
125 125 b'phabsend', b'confirm', default=False,
126 126 )
127 127
128 128 colortable = {
129 129 b'phabricator.action.created': b'green',
130 130 b'phabricator.action.skipped': b'magenta',
131 131 b'phabricator.action.updated': b'magenta',
132 132 b'phabricator.desc': b'',
133 133 b'phabricator.drev': b'bold',
134 134 b'phabricator.node': b'',
135 135 b'phabricator.status.abandoned': b'magenta dim',
136 136 b'phabricator.status.accepted': b'green bold',
137 137 b'phabricator.status.closed': b'green',
138 138 b'phabricator.status.needsreview': b'yellow',
139 139 b'phabricator.status.needsrevision': b'red',
140 140 b'phabricator.status.changesplanned': b'red',
141 141 }
142 142
143 143 _VCR_FLAGS = [
144 144 (
145 145 b'',
146 146 b'test-vcr',
147 147 b'',
148 148 _(
149 149 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
150 150 b', otherwise will mock all http requests using the specified vcr file.'
151 151 b' (ADVANCED)'
152 152 ),
153 153 ),
154 154 ]
155 155
156 156
157 157 @eh.wrapfunction(localrepo, "loadhgrc")
158 158 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
159 159 """Load ``.arcconfig`` content into a ui instance on repository open.
160 160 """
161 161 result = False
162 162 arcconfig = {}
163 163
164 164 try:
165 165 # json.loads only accepts bytes from 3.6+
166 166 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
167 167 # json.loads only returns unicode strings
168 168 arcconfig = pycompat.rapply(
169 169 lambda x: encoding.unitolocal(x)
170 170 if isinstance(x, pycompat.unicode)
171 171 else x,
172 172 pycompat.json_loads(rawparams),
173 173 )
174 174
175 175 result = True
176 176 except ValueError:
177 177 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
178 178 except IOError:
179 179 pass
180 180
181 181 cfg = util.sortdict()
182 182
183 183 if b"repository.callsign" in arcconfig:
184 184 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
185 185
186 186 if b"phabricator.uri" in arcconfig:
187 187 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
188 188
189 189 if cfg:
190 190 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
191 191
192 192 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
193 193
194 194
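For illustration, the ``_loadhgrc`` wrapper above only lifts two keys out of ``.arcconfig``. A standalone sketch of that mapping, using a placeholder callsign and URL rather than values from this change::

    import json

    # Hypothetical .arcconfig found at the root of the working directory
    arcconfig = json.loads(
        '{"repository.callsign": "FOO",'
        ' "phabricator.uri": "https://phab.example.com/"}'
    )

    cfg = {}
    if "repository.callsign" in arcconfig:
        cfg[("phabricator", "callsign")] = arcconfig["repository.callsign"]
    if "phabricator.uri" in arcconfig:
        cfg[("phabricator", "url")] = arcconfig["phabricator.uri"]

    # cfg is what ui.applyconfig() receives, i.e. the same effect as setting
    # phabricator.callsign and phabricator.url in an hgrc file.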
195 195 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
196 196 fullflags = flags + _VCR_FLAGS
197 197
198 198 def hgmatcher(r1, r2):
199 199 if r1.uri != r2.uri or r1.method != r2.method:
200 200 return False
201 201 r1params = util.urlreq.parseqs(r1.body)
202 202 r2params = util.urlreq.parseqs(r2.body)
203 203 for key in r1params:
204 204 if key not in r2params:
205 205 return False
206 206 value = r1params[key][0]
207 207 # we want to compare json payloads without worrying about ordering
208 208 if value.startswith(b'{') and value.endswith(b'}'):
209 209 r1json = pycompat.json_loads(value)
210 210 r2json = pycompat.json_loads(r2params[key][0])
211 211 if r1json != r2json:
212 212 return False
213 213 elif r2params[key][0] != value:
214 214 return False
215 215 return True
216 216
217 217 def sanitiserequest(request):
218 218 request.body = re.sub(
219 219 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
220 220 )
221 221 return request
222 222
223 223 def sanitiseresponse(response):
224 224 if 'set-cookie' in response['headers']:
225 225 del response['headers']['set-cookie']
226 226 return response
227 227
228 228 def decorate(fn):
229 229 def inner(*args, **kwargs):
230 230 cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
231 231 if cassette:
232 232 import hgdemandimport
233 233
234 234 with hgdemandimport.deactivated():
235 235 import vcr as vcrmod
236 236 import vcr.stubs as stubs
237 237
238 238 vcr = vcrmod.VCR(
239 239 serializer='json',
240 240 before_record_request=sanitiserequest,
241 241 before_record_response=sanitiseresponse,
242 242 custom_patches=[
243 243 (
244 244 urlmod,
245 245 'httpconnection',
246 246 stubs.VCRHTTPConnection,
247 247 ),
248 248 (
249 249 urlmod,
250 250 'httpsconnection',
251 251 stubs.VCRHTTPSConnection,
252 252 ),
253 253 ],
254 254 )
255 255 vcr.register_matcher('hgmatcher', hgmatcher)
256 256 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
257 257 return fn(*args, **kwargs)
258 258 return fn(*args, **kwargs)
259 259
260 260 cmd = util.checksignature(inner, depth=2)
261 261 cmd.__name__ = fn.__name__
262 262 cmd.__doc__ = fn.__doc__
263 263
264 264 return command(
265 265 name,
266 266 fullflags,
267 267 spec,
268 268 helpcategory=helpcategory,
269 269 optionalrepo=optionalrepo,
270 270 )(cmd)
271 271
272 272 return decorate
273 273
274 274
275 275 def urlencodenested(params):
276 276 """like urlencode, but works with nested parameters.
277 277
278 278 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
279 279 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
280 280 urlencode. Note: the encoding is consistent with PHP's http_build_query.
281 281 """
282 282 flatparams = util.sortdict()
283 283
284 284 def process(prefix, obj):
285 285 if isinstance(obj, bool):
286 286 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
287 287 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
288 288 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
289 289 if items is None:
290 290 flatparams[prefix] = obj
291 291 else:
292 292 for k, v in items(obj):
293 293 if prefix:
294 294 process(b'%s[%s]' % (prefix, k), v)
295 295 else:
296 296 process(k, v)
297 297
298 298 process(b'', params)
299 299 return util.urlreq.urlencode(flatparams)
300 300
301 301
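A quick usage sketch of the flattening described in the docstring, reusing its own example values (assumes it runs where ``urlencodenested`` is importable)::

    params = {b'a': [b'b', b'c'], b'd': {b'e': b'f'}}
    # flatparams becomes {b'a[0]': b'b', b'a[1]': b'c', b'd[e]': b'f'},
    # and the brackets are then percent-encoded by urlencode:
    print(urlencodenested(params))
    # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f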
302 302 def readurltoken(ui):
303 303 """return conduit url, token and make sure they exist
304 304
305 305 Currently read from [auth] config section. In the future, it might
306 306 make sense to read from .arcconfig and .arcrc as well.
307 307 """
308 308 url = ui.config(b'phabricator', b'url')
309 309 if not url:
310 310 raise error.Abort(
311 311 _(b'config %s.%s is required') % (b'phabricator', b'url')
312 312 )
313 313
314 314 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
315 315 token = None
316 316
317 317 if res:
318 318 group, auth = res
319 319
320 320 ui.debug(b"using auth.%s.* for authentication\n" % group)
321 321
322 322 token = auth.get(b'phabtoken')
323 323
324 324 if not token:
325 325 raise error.Abort(
326 326 _(b'Can\'t find conduit token associated to %s') % (url,)
327 327 )
328 328
329 329 return url, token
330 330
331 331
332 332 def callconduit(ui, name, params):
333 333 """call Conduit API, params is a dict. return json.loads result, or None"""
334 334 host, token = readurltoken(ui)
335 335 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
336 336 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
337 337 params = params.copy()
338 338 params[b'__conduit__'] = {
339 339 b'token': token,
340 340 }
341 341 rawdata = {
342 342 b'params': templatefilters.json(params),
343 343 b'output': b'json',
344 344 b'__conduit__': 1,
345 345 }
346 346 data = urlencodenested(rawdata)
347 347 curlcmd = ui.config(b'phabricator', b'curlcmd')
348 348 if curlcmd:
349 349 sin, sout = procutil.popen2(
350 350 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
351 351 )
352 352 sin.write(data)
353 353 sin.close()
354 354 body = sout.read()
355 355 else:
356 356 urlopener = urlmod.opener(ui, authinfo)
357 357 request = util.urlreq.request(pycompat.strurl(url), data=data)
358 358 with contextlib.closing(urlopener.open(request)) as rsp:
359 359 body = rsp.read()
360 360 ui.debug(b'Conduit Response: %s\n' % body)
361 361 parsed = pycompat.rapply(
362 362 lambda x: encoding.unitolocal(x)
363 363 if isinstance(x, pycompat.unicode)
364 364 else x,
365 365 # json.loads only accepts bytes from py3.6+
366 366 pycompat.json_loads(encoding.unifromlocal(body)),
367 367 )
368 368 if parsed.get(b'error_code'):
369 369 msg = _(b'Conduit Error (%s): %s') % (
370 370 parsed[b'error_code'],
371 371 parsed[b'error_info'],
372 372 )
373 373 raise error.Abort(msg)
374 374 return parsed[b'result']
375 375
376 376
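The cassette recorded above shows what this produces on the wire. A stdlib-only sketch of the request body that ``callconduit`` builds (the token is the sanitised placeholder from the cassette; field order and exact percent-encoding may differ slightly from ``templatefilters.json``/``urlencodenested``)::

    import json
    from urllib.parse import urlencode

    params = {"__conduit__": {"token": "cli-hahayouwish"}, "ids": [7917, 7918]}
    rawdata = {
        "params": json.dumps(params),  # Conduit takes all parameters as one JSON blob
        "output": "json",
        "__conduit__": 1,
    }
    body = urlencode(rawdata)
    # POSTed as application/x-www-form-urlencoded to <phabricator.url>/api/<method>,
    # matching the "__conduit__=1&output=json&params=%7B...%7D" bodies in the cassette.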
377 377 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
378 378 def debugcallconduit(ui, repo, name):
379 379 """call Conduit API
380 380
381 381 Call parameters are read from stdin as a JSON blob. Result will be written
382 382 to stdout as a JSON blob.
383 383 """
384 384 # json.loads only accepts bytes from 3.6+
385 385 rawparams = encoding.unifromlocal(ui.fin.read())
386 386 # json.loads only returns unicode strings
387 387 params = pycompat.rapply(
388 388 lambda x: encoding.unitolocal(x)
389 389 if isinstance(x, pycompat.unicode)
390 390 else x,
391 391 pycompat.json_loads(rawparams),
392 392 )
393 393 # json.dumps only accepts unicode strings
394 394 result = pycompat.rapply(
395 395 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
396 396 callconduit(ui, name, params),
397 397 )
398 398 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
399 399 ui.write(b'%s\n' % encoding.unitolocal(s))
400 400
401 401
402 402 def getrepophid(repo):
403 403 """given callsign, return repository PHID or None"""
404 404 # developer config: phabricator.repophid
405 405 repophid = repo.ui.config(b'phabricator', b'repophid')
406 406 if repophid:
407 407 return repophid
408 408 callsign = repo.ui.config(b'phabricator', b'callsign')
409 409 if not callsign:
410 410 return None
411 411 query = callconduit(
412 412 repo.ui,
413 413 b'diffusion.repository.search',
414 414 {b'constraints': {b'callsigns': [callsign]}},
415 415 )
416 416 if len(query[b'data']) == 0:
417 417 return None
418 418 repophid = query[b'data'][0][b'phid']
419 419 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
420 420 return repophid
421 421
422 422
423 423 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
424 424 _differentialrevisiondescre = re.compile(
425 425 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
426 426 )
427 427
428 428
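A standalone sketch of what the two patterns above match; the patterns are duplicated verbatim, and the commit-message footer is a made-up example using a drev number that appears in the cassette::

    import re

    tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
    )

    # Local tags such as "D7917" map directly to a Differential Revision ID
    assert tagre.match(b'D7917').group(1) == b'7917'

    # The "Differential Revision:" footer that phabsend leaves in commit
    # messages is matched as well
    msg = (b'create public change for phabricator testing\n\n'
           b'Differential Revision: https://phab.mercurial-scm.org/D7917')
    m = descre.search(msg)
    assert m.group('id') == b'7917'
    assert m.group('url') == b'https://phab.mercurial-scm.org/D7917'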
429 429 def getoldnodedrevmap(repo, nodelist):
430 430 """find previous nodes that has been sent to Phabricator
431 431
432 432 return {node: (oldnode, Differential diff, Differential Revision ID)}
433 433 for node in nodelist with known previous sent versions, or associated
434 434 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
435 435 be ``None``.
436 436
437 437 Examines commit messages like "Differential Revision:" to get the
438 438 association information.
439 439
440 440 If no such commit message line is found, examines all precursors and their
441 441 tags. Tags in the format "D1234" are considered a match, and the node
442 442 with that tag and the number after "D" (e.g. 1234) will be returned.
443 443
444 444 The ``old node``, if not None, is guaranteed to be the last diff of the
445 445 corresponding Differential Revision, and to exist in the repo.
446 446 """
447 447 unfi = repo.unfiltered()
448 448 has_node = unfi.changelog.index.has_node
449 449
450 450 result = {} # {node: (oldnode?, lastdiff?, drev)}
451 451 toconfirm = {} # {node: (force, {precnode}, drev)}
452 452 for node in nodelist:
453 453 ctx = unfi[node]
454 454 # For tags like "D123", put them into "toconfirm" to verify later
455 455 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
456 456 for n in precnodes:
457 457 if has_node(n):
458 458 for tag in unfi.nodetags(n):
459 459 m = _differentialrevisiontagre.match(tag)
460 460 if m:
461 461 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
462 462 break
463 463 else:
464 464 continue # move to next predecessor
465 465 break # found a tag, stop
466 466 else:
467 467 # Check commit message
468 468 m = _differentialrevisiondescre.search(ctx.description())
469 469 if m:
470 470 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
471 471
472 472 # Double check if tags are genuine by collecting all old nodes from
473 473 # Phabricator, and expect precursors overlap with it.
474 474 if toconfirm:
475 475 drevs = [drev for force, precs, drev in toconfirm.values()]
476 476 alldiffs = callconduit(
477 477 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
478 478 )
479 479 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
480 480 for newnode, (force, precset, drev) in toconfirm.items():
481 481 diffs = [
482 482 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
483 483 ]
484 484
485 485 # "precursors" as known by Phabricator
486 486 phprecset = {getnode(d) for d in diffs}
487 487
488 488 # Ignore if precursors (Phabricator and local repo) do not overlap,
489 489 # and force is not set (when commit message says nothing)
490 490 if not force and not bool(phprecset & precset):
491 491 tagname = b'D%d' % drev
492 492 tags.tag(
493 493 repo,
494 494 tagname,
495 495 nullid,
496 496 message=None,
497 497 user=None,
498 498 date=None,
499 499 local=True,
500 500 )
501 501 unfi.ui.warn(
502 502 _(
503 503 b'D%d: local tag removed - does not match '
504 504 b'Differential history\n'
505 505 )
506 506 % drev
507 507 )
508 508 continue
509 509
510 510 # Find the last node using Phabricator metadata, and make sure it
511 511 # exists in the repo
512 512 oldnode = lastdiff = None
513 513 if diffs:
514 514 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
515 515 oldnode = getnode(lastdiff)
516 516 if oldnode and not has_node(oldnode):
517 517 oldnode = None
518 518
519 519 result[newnode] = (oldnode, lastdiff, drev)
520 520
521 521 return result
522 522
523 523
524 524 def getdrevmap(repo, revs):
525 525 """Return a dict mapping each rev in `revs` to their Differential Revision
526 526 ID or None.
527 527 """
528 528 result = {}
529 529 for rev in revs:
530 530 result[rev] = None
531 531 ctx = repo[rev]
532 532 # Check commit message
533 533 m = _differentialrevisiondescre.search(ctx.description())
534 534 if m:
535 535 result[rev] = int(m.group('id'))
536 536 continue
537 537 # Check tags
538 538 for tag in repo.nodetags(ctx.node()):
539 539 m = _differentialrevisiontagre.match(tag)
540 540 if m:
541 541 result[rev] = int(m.group(1))
542 542 break
543 543
544 544 return result
545 545
546 546
547 547 def getdiff(ctx, diffopts):
548 548 """plain-text diff without header (user, commit message, etc)"""
549 549 output = util.stringio()
550 550 for chunk, _label in patch.diffui(
551 551 ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
552 552 ):
553 553 output.write(chunk)
554 554 return output.getvalue()
555 555
556 556
557 557 class DiffChangeType(object):
558 558 ADD = 1
559 559 CHANGE = 2
560 560 DELETE = 3
561 561 MOVE_AWAY = 4
562 562 COPY_AWAY = 5
563 563 MOVE_HERE = 6
564 564 COPY_HERE = 7
565 565 MULTICOPY = 8
566 566
567 567
568 568 class DiffFileType(object):
569 569 TEXT = 1
570 570 IMAGE = 2
571 571 BINARY = 3
572 572
573 573
574 574 @attr.s
575 575 class phabhunk(dict):
576 576 """Represents a Differential hunk, which is owned by a Differential change
577 577 """
578 578
579 579 oldOffset = attr.ib(default=0) # camelcase-required
580 580 oldLength = attr.ib(default=0) # camelcase-required
581 581 newOffset = attr.ib(default=0) # camelcase-required
582 582 newLength = attr.ib(default=0) # camelcase-required
583 583 corpus = attr.ib(default='')
584 584 # These get added to the phabchange's equivalents
585 585 addLines = attr.ib(default=0) # camelcase-required
586 586 delLines = attr.ib(default=0) # camelcase-required
587 587
588 588
589 589 @attr.s
590 590 class phabchange(object):
591 591 """Represents a Differential change, owns Differential hunks and owned by a
592 592 Differential diff. Each one represents one file in a diff.
593 593 """
594 594
595 595 currentPath = attr.ib(default=None) # camelcase-required
596 596 oldPath = attr.ib(default=None) # camelcase-required
597 597 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
598 598 metadata = attr.ib(default=attr.Factory(dict))
599 599 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
600 600 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
601 601 type = attr.ib(default=DiffChangeType.CHANGE)
602 602 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
603 603 commitHash = attr.ib(default=None) # camelcase-required
604 604 addLines = attr.ib(default=0) # camelcase-required
605 605 delLines = attr.ib(default=0) # camelcase-required
606 606 hunks = attr.ib(default=attr.Factory(list))
607 607
608 608 def copynewmetadatatoold(self):
609 609 for key in list(self.metadata.keys()):
610 610 newkey = key.replace(b'new:', b'old:')
611 611 self.metadata[newkey] = self.metadata[key]
612 612
613 613 def addoldmode(self, value):
614 614 self.oldProperties[b'unix:filemode'] = value
615 615
616 616 def addnewmode(self, value):
617 617 self.newProperties[b'unix:filemode'] = value
618 618
619 619 def addhunk(self, hunk):
620 620 if not isinstance(hunk, phabhunk):
621 621 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
622 622 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
623 623 # It's useful to include these stats since the Phab web UI shows them,
624 624 # and uses them to estimate how large a change a Revision is. Also used
625 625 # in email subjects for the [+++--] bit.
626 626 self.addLines += hunk.addLines
627 627 self.delLines += hunk.delLines
628 628
629 629
630 630 @attr.s
631 631 class phabdiff(object):
632 632 """Represents a Differential diff, owns Differential changes. Corresponds
633 633 to a commit.
634 634 """
635 635
636 636 # Doesn't seem to be any reason to send this (output of uname -n)
637 637 sourceMachine = attr.ib(default=b'') # camelcase-required
638 638 sourcePath = attr.ib(default=b'/') # camelcase-required
639 639 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
640 640 sourceControlPath = attr.ib(default=b'/') # camelcase-required
641 641 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
642 642 branch = attr.ib(default=b'default')
643 643 bookmark = attr.ib(default=None)
644 644 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
645 645 lintStatus = attr.ib(default=b'none') # camelcase-required
646 646 unitStatus = attr.ib(default=b'none') # camelcase-required
647 647 changes = attr.ib(default=attr.Factory(dict))
648 648 repositoryPHID = attr.ib(default=None) # camelcase-required
649 649
650 650 def addchange(self, change):
651 651 if not isinstance(change, phabchange):
652 652 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
653 653 self.changes[change.currentPath] = pycompat.byteskwargs(
654 654 attr.asdict(change)
655 655 )
656 656
657 657
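A usage sketch tying the three classes above together (assumes they are imported from this module; the corpus mirrors one of the hunks recorded in the cassette)::

    hunk = phabhunk(
        oldOffset=1, oldLength=1, newOffset=1, newLength=1,
        corpus=b'-beta\n+public change\n', addLines=1, delLines=1,
    )
    pchange = phabchange(currentPath=b'beta', oldPath=b'beta')
    pchange.addhunk(hunk)      # also accumulates the change's addLines/delLines
    pdiff = phabdiff()
    pdiff.addchange(pchange)
    # pdiff.changes[b'beta'] now holds the dict form that gets serialized and
    # sent to Phabricator, similar to the "changes" entries in the cassette.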
658 658 def maketext(pchange, ctx, fname):
659 659 """populate the phabchange for a text file"""
660 660 repo = ctx.repo()
661 661 fmatcher = match.exact([fname])
662 662 diffopts = mdiff.diffopts(git=True, context=32767)
663 663 _pfctx, _fctx, header, fhunks = next(
664 664 patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
665 665 )
666 666
667 667 for fhunk in fhunks:
668 668 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
669 669 corpus = b''.join(lines[1:])
670 670 shunk = list(header)
671 671 shunk.extend(lines)
672 672 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
673 673 patch.diffstatdata(util.iterlines(shunk))
674 674 )
675 675 pchange.addhunk(
676 676 phabhunk(
677 677 oldOffset,
678 678 oldLength,
679 679 newOffset,
680 680 newLength,
681 681 corpus,
682 682 addLines,
683 683 delLines,
684 684 )
685 685 )
686 686
687 687
688 688 def uploadchunks(fctx, fphid):
689 689 """upload large binary files as separate chunks.
690 690 Phab requests chunking over 8MiB, and splits into 4MiB chunks
691 691 """
692 692 ui = fctx.repo().ui
693 693 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
694 694 with ui.makeprogress(
695 695 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
696 696 ) as progress:
697 697 for chunk in chunks:
698 698 progress.increment()
699 699 if chunk[b'complete']:
700 700 continue
701 701 bstart = int(chunk[b'byteStart'])
702 702 bend = int(chunk[b'byteEnd'])
703 703 callconduit(
704 704 ui,
705 705 b'file.uploadchunk',
706 706 {
707 707 b'filePHID': fphid,
708 708 b'byteStart': bstart,
709 709 b'data': base64.b64encode(fctx.data()[bstart:bend]),
710 710 b'dataEncoding': b'base64',
711 711 },
712 712 )
713 713
714 714
715 715 def uploadfile(fctx):
716 716 """upload binary files to Phabricator"""
717 717 repo = fctx.repo()
718 718 ui = repo.ui
719 719 fname = fctx.path()
720 720 size = fctx.size()
721 721 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
722 722
723 723 # an allocate call is required first to see if an upload is even required
724 724 # (Phab might already have it) and to determine if chunking is needed
725 725 allocateparams = {
726 726 b'name': fname,
727 727 b'contentLength': size,
728 728 b'contentHash': fhash,
729 729 }
730 730 filealloc = callconduit(ui, b'file.allocate', allocateparams)
731 731 fphid = filealloc[b'filePHID']
732 732
733 733 if filealloc[b'upload']:
734 734 ui.write(_(b'uploading %s\n') % bytes(fctx))
735 735 if not fphid:
736 736 uploadparams = {
737 737 b'name': fname,
738 738 b'data_base64': base64.b64encode(fctx.data()),
739 739 }
740 740 fphid = callconduit(ui, b'file.upload', uploadparams)
741 741 else:
742 742 uploadchunks(fctx, fphid)
743 743 else:
744 744 ui.debug(b'server already has %s\n' % bytes(fctx))
745 745
746 746 if not fphid:
747 747 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
748 748
749 749 return fphid
750 750
751 751
752 752 def addoldbinary(pchange, oldfctx, fctx):
753 753 """add the metadata for the previous version of a binary file to the
754 754 phabchange for the new version
755 755
756 756 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
757 757 version of the file, or None if the file is being removed.
758 758 """
759 759 if not fctx or fctx.cmp(oldfctx):
760 760 # Files differ, add the old one
761 761 pchange.metadata[b'old:file:size'] = oldfctx.size()
762 762 mimeguess, _enc = mimetypes.guess_type(
763 763 encoding.unifromlocal(oldfctx.path())
764 764 )
765 765 if mimeguess:
766 766 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
767 767 mimeguess
768 768 )
769 769 fphid = uploadfile(oldfctx)
770 770 pchange.metadata[b'old:binary-phid'] = fphid
771 771 else:
772 772 # If it's left as IMAGE/BINARY web UI might try to display it
773 773 pchange.fileType = DiffFileType.TEXT
774 774 pchange.copynewmetadatatoold()
775 775
776 776
777 777 def makebinary(pchange, fctx):
778 778 """populate the phabchange for a binary file"""
779 779 pchange.fileType = DiffFileType.BINARY
780 780 fphid = uploadfile(fctx)
781 781 pchange.metadata[b'new:binary-phid'] = fphid
782 782 pchange.metadata[b'new:file:size'] = fctx.size()
783 783 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
784 784 if mimeguess:
785 785 mimeguess = pycompat.bytestr(mimeguess)
786 786 pchange.metadata[b'new:file:mime-type'] = mimeguess
787 787 if mimeguess.startswith(b'image/'):
788 788 pchange.fileType = DiffFileType.IMAGE
789 789
790 790
791 791 # Copied from mercurial/patch.py
792 792 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
793 793
794 794
795 795 def notutf8(fctx):
796 796 """detect non-UTF-8 text files since Phabricator requires them to be marked
797 797 as binary
798 798 """
799 799 try:
800 800 fctx.data().decode('utf-8')
801 801 return False
802 802 except UnicodeDecodeError:
803 803 fctx.repo().ui.write(
804 804 _(b'file %s detected as non-UTF-8, marked as binary\n')
805 805 % fctx.path()
806 806 )
807 807 return True
808 808
809 809
810 810 def addremoved(pdiff, ctx, removed):
811 811 """add removed files to the phabdiff. Shouldn't include moves"""
812 812 for fname in removed:
813 813 pchange = phabchange(
814 814 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
815 815 )
816 816 pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
817 817 oldfctx = ctx.p1()[fname]
818 818 if not (oldfctx.isbinary() or notutf8(oldfctx)):
819 819 maketext(pchange, ctx, fname)
820 820
821 821 pdiff.addchange(pchange)
822 822
823 823
824 824 def addmodified(pdiff, ctx, modified):
825 825 """add modified files to the phabdiff"""
826 826 for fname in modified:
827 827 fctx = ctx[fname]
828 828 oldfctx = fctx.p1()
829 829 pchange = phabchange(currentPath=fname, oldPath=fname)
830 830 filemode = gitmode[ctx[fname].flags()]
831 831 originalmode = gitmode[ctx.p1()[fname].flags()]
832 832 if filemode != originalmode:
833 833 pchange.addoldmode(originalmode)
834 834 pchange.addnewmode(filemode)
835 835
836 836 if (
837 837 fctx.isbinary()
838 838 or notutf8(fctx)
839 839 or oldfctx.isbinary()
840 840 or notutf8(oldfctx)
841 841 ):
842 842 makebinary(pchange, fctx)
843 843 addoldbinary(pchange, fctx.p1(), fctx)
844 844 else:
845 845 maketext(pchange, ctx, fname)
846 846
847 847 pdiff.addchange(pchange)
848 848
849 849
850 850 def addadded(pdiff, ctx, added, removed):
851 851 """add file adds to the phabdiff, both new files and copies/moves"""
852 852 # Keep track of files that've been recorded as moved/copied, so if there are
853 853 # additional copies we can mark them (moves get removed from removed)
854 854 copiedchanges = {}
855 855 movedchanges = {}
856 856 for fname in added:
857 857 fctx = ctx[fname]
858 858 oldfctx = None
859 859 pchange = phabchange(currentPath=fname)
860 860
861 861 filemode = gitmode[ctx[fname].flags()]
862 862 renamed = fctx.renamed()
863 863
864 864 if renamed:
865 865 originalfname = renamed[0]
866 866 oldfctx = ctx.p1()[originalfname]
867 867 originalmode = gitmode[oldfctx.flags()]
868 868 pchange.oldPath = originalfname
869 869
870 870 if originalfname in removed:
871 871 origpchange = phabchange(
872 872 currentPath=originalfname,
873 873 oldPath=originalfname,
874 874 type=DiffChangeType.MOVE_AWAY,
875 875 awayPaths=[fname],
876 876 )
877 877 movedchanges[originalfname] = origpchange
878 878 removed.remove(originalfname)
879 879 pchange.type = DiffChangeType.MOVE_HERE
880 880 elif originalfname in movedchanges:
881 881 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
882 882 movedchanges[originalfname].awayPaths.append(fname)
883 883 pchange.type = DiffChangeType.COPY_HERE
884 884 else: # pure copy
885 885 if originalfname not in copiedchanges:
886 886 origpchange = phabchange(
887 887 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
888 888 )
889 889 copiedchanges[originalfname] = origpchange
890 890 else:
891 891 origpchange = copiedchanges[originalfname]
892 892 origpchange.awayPaths.append(fname)
893 893 pchange.type = DiffChangeType.COPY_HERE
894 894
895 895 if filemode != originalmode:
896 896 pchange.addoldmode(originalmode)
897 897 pchange.addnewmode(filemode)
898 898 else: # Brand-new file
899 899 pchange.addnewmode(gitmode[fctx.flags()])
900 900 pchange.type = DiffChangeType.ADD
901 901
902 902 if (
903 903 fctx.isbinary()
904 904 or notutf8(fctx)
905 905 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
906 906 ):
907 907 makebinary(pchange, fctx)
908 908 if renamed:
909 909 addoldbinary(pchange, oldfctx, fctx)
910 910 else:
911 911 maketext(pchange, ctx, fname)
912 912
913 913 pdiff.addchange(pchange)
914 914
915 915 for _path, copiedchange in copiedchanges.items():
916 916 pdiff.addchange(copiedchange)
917 917 for _path, movedchange in movedchanges.items():
918 918 pdiff.addchange(movedchange)
919 919
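# Worked example (hypothetical file names) of the move/copy bookkeeping above:
#
#   `hg mv old new`  -> added=[b'new'], removed=[b'old']
#       movedchanges[b'old'] becomes a MOVE_AWAY change with
#       awayPaths=[b'new'], b'old' is dropped from `removed` (so addremoved()
#       will not emit a DELETE for it), and the change for b'new' gets type
#       MOVE_HERE with oldPath=b'old'.
#   `hg cp a b` (a kept) -> added=[b'b']
#       copiedchanges[b'a'] becomes a COPY_AWAY change with awayPaths=[b'b'],
#       and the change for b'b' gets type COPY_HERE with oldPath=b'a'.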
920 920
921 921 def creatediff(ctx):
922 922 """create a Differential Diff"""
923 923 repo = ctx.repo()
924 924 repophid = getrepophid(repo)
925 925 # Create a "Differential Diff" via "differential.creatediff" API
926 926 pdiff = phabdiff(
927 927 sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
928 928 branch=b'%s' % ctx.branch(),
929 929 )
930 930 modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
931 931 # addadded will remove moved files from removed, so addremoved won't get
932 932 # them
933 933 addadded(pdiff, ctx, added, removed)
934 934 addmodified(pdiff, ctx, modified)
935 935 addremoved(pdiff, ctx, removed)
936 936 if repophid:
937 937 pdiff.repositoryPHID = repophid
938 938 diff = callconduit(
939 939 repo.ui,
940 940 b'differential.creatediff',
941 941 pycompat.byteskwargs(attr.asdict(pdiff)),
942 942 )
943 943 if not diff:
944 944 raise error.Abort(_(b'cannot create diff for %s') % ctx)
945 945 return diff
946 946
947 947
948 948 def writediffproperties(ctx, diff):
949 949 """write metadata to diff so patches could be applied losslessly"""
950 950 # creatediff returns with a diffid but query returns with an id
951 951 diffid = diff.get(b'diffid', diff.get(b'id'))
952 952 params = {
953 953 b'diff_id': diffid,
954 954 b'name': b'hg:meta',
955 955 b'data': templatefilters.json(
956 956 {
957 957 b'user': ctx.user(),
958 958 b'date': b'%d %d' % ctx.date(),
959 959 b'branch': ctx.branch(),
960 960 b'node': ctx.hex(),
961 961 b'parent': ctx.p1().hex(),
962 962 }
963 963 ),
964 964 }
965 965 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
966 966
967 967 params = {
968 968 b'diff_id': diffid,
969 969 b'name': b'local:commits',
970 970 b'data': templatefilters.json(
971 971 {
972 972 ctx.hex(): {
973 973 b'author': stringutil.person(ctx.user()),
974 974 b'authorEmail': stringutil.email(ctx.user()),
975 975 b'time': int(ctx.date()[0]),
976 976 b'commit': ctx.hex(),
977 977 b'parents': [ctx.p1().hex()],
978 978 b'branch': ctx.branch(),
979 979 },
980 980 }
981 981 ),
982 982 }
983 983 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
984 984
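# Round-trip note (sample values reused from the getdiffmeta() docstring
# further down): the b'hg:meta' property written above is what phabread /
# phabimport later recover, e.g.
#
#   {"branch": "default",
#    "date": "1499571514 25200",
#    "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
#    "parent": "6d0abad76b30e4724a37ab8721d630394070fe16",
#    "user": "Foo Bar <foo@example.com>"}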
985 985
986 986 def createdifferentialrevision(
987 987 ctx,
988 988 revid=None,
989 989 parentrevphid=None,
990 990 oldnode=None,
991 991 olddiff=None,
992 992 actions=None,
993 993 comment=None,
994 994 ):
995 995 """create or update a Differential Revision
996 996
997 997 If revid is None, create a new Differential Revision, otherwise update
998 998 revid. If parentrevphid is not None, set it as a dependency.
999 999
1000 1000 If oldnode is not None, check if the patch content (without commit message
1001 1001 and metadata) has changed before creating another diff.
1002 1002
1003 1003 If actions is not None, they will be appended to the transaction.
1004 1004 """
1005 1005 repo = ctx.repo()
1006 1006 if oldnode:
1007 1007 diffopts = mdiff.diffopts(git=True, context=32767)
1008 1008 oldctx = repo.unfiltered()[oldnode]
1009 1009 neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
1010 1010 else:
1011 1011 neednewdiff = True
1012 1012
1013 1013 transactions = []
1014 1014 if neednewdiff:
1015 1015 diff = creatediff(ctx)
1016 1016 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1017 1017 if comment:
1018 1018 transactions.append({b'type': b'comment', b'value': comment})
1019 1019 else:
1020 1020 # Even if we don't need to upload a new diff because the patch content
1021 1021 # has not changed, we might still need to update its metadata so
1022 1022 # pushers can know the correct node metadata.
1023 1023 assert olddiff
1024 1024 diff = olddiff
1025 1025 writediffproperties(ctx, diff)
1026 1026
1027 1027 # Set the parent Revision every time, so commit re-ordering is picked up
1028 1028 if parentrevphid:
1029 1029 transactions.append(
1030 1030 {b'type': b'parents.set', b'value': [parentrevphid]}
1031 1031 )
1032 1032
1033 1033 if actions:
1034 1034 transactions += actions
1035 1035
1036 1036 # Parse commit message and update related fields.
1037 1037 desc = ctx.description()
1038 1038 info = callconduit(
1039 1039 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1040 1040 )
1041 1041 for k, v in info[b'fields'].items():
1042 1042 if k in [b'title', b'summary', b'testPlan']:
1043 1043 transactions.append({b'type': k, b'value': v})
1044 1044
1045 1045 params = {b'transactions': transactions}
1046 1046 if revid is not None:
1047 1047 # Update an existing Differential Revision
1048 1048 params[b'objectIdentifier'] = revid
1049 1049
1050 1050 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1051 1051 if not revision:
1052 1052 raise error.Abort(_(b'cannot create revision for %s') % ctx)
1053 1053
1054 1054 return revision, diff
1055 1055
1056 1056
1057 1057 def userphids(ui, names):
1058 1058 """convert user names to PHIDs"""
1059 1059 names = [name.lower() for name in names]
1060 1060 query = {b'constraints': {b'usernames': names}}
1061 1061 result = callconduit(ui, b'user.search', query)
1062 1062 # The API does not treat an unknown username as an error, so check whether
1063 1063 # any names were missed here.
1064 1064 data = result[b'data']
1065 1065 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1066 1066 unresolved = set(names) - resolved
1067 1067 if unresolved:
1068 1068 raise error.Abort(
1069 1069 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1070 1070 )
1071 1071 return [entry[b'phid'] for entry in data]
1072 1072
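# A minimal sketch (assumed wiring with hypothetical usernames, mirroring the
# phabsend command below) of how reviewer and blocking-reviewer names become a
# single transaction:
#
#   phids = userphids(ui, [b'alice'])
#   phids += [b'blocking(%s)' % p for p in userphids(ui, [b'bob'])]
#   actions = [{b'type': b'reviewers.add', b'value': phids}]
#
# Any unknown username aborts before a transaction is sent.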
1073 1073
1074 1074 @vcrcommand(
1075 1075 b'phabsend',
1076 1076 [
1077 1077 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1078 1078 (b'', b'amend', True, _(b'update commit messages')),
1079 1079 (b'', b'reviewer', [], _(b'specify reviewers')),
1080 1080 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1081 1081 (
1082 1082 b'm',
1083 1083 b'comment',
1084 1084 b'',
1085 1085 _(b'add a comment to Revisions with new/updated Diffs'),
1086 1086 ),
1087 1087 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1088 1088 ],
1089 1089 _(b'REV [OPTIONS]'),
1090 1090 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1091 1091 )
1092 1092 def phabsend(ui, repo, *revs, **opts):
1093 1093 """upload changesets to Phabricator
1094 1094
1095 1095 If multiple revisions are specified, they will be sent as a stack
1096 1096 with a linear dependency relationship, using the order specified by the
1097 1097 revset.
1098 1098
1099 1099 When changesets are uploaded for the first time, local tags are created to
1100 1100 maintain the association. After that, phabsend checks the obsstore and tag
1101 1101 information to figure out whether to update an existing Differential
1102 1102 Revision or create a new one.
1103 1103
1104 1104 If --amend is set, commit messages are updated to include the
1105 1105 ``Differential Revision`` URL and related tags are removed. This is similar
1106 1106 to what arcanist does, and is preferable in author-push workflows. Otherwise,
1107 1107 local tags are used to record the ``Differential Revision`` association.
1108 1108
1109 1109 The --confirm option lets you confirm changesets before sending them. You
1110 1110 can also add the following to your configuration file to make it the default
1111 1111 behaviour::
1112 1112
1113 1113 [phabsend]
1114 1114 confirm = true
1115 1115
1116 1116 phabsend checks the obsstore and the above association to decide whether to
1117 1117 update an existing Differential Revision or create a new one.
1118 1118 """
1119 1119 opts = pycompat.byteskwargs(opts)
1120 1120 revs = list(revs) + opts.get(b'rev', [])
1121 1121 revs = scmutil.revrange(repo, revs)
1122 1122 revs.sort() # ascending order to preserve topological parent/child in phab
1123 1123
1124 1124 if not revs:
1125 1125 raise error.Abort(_(b'phabsend requires at least one changeset'))
1126 1126 if opts.get(b'amend'):
1127 1127 cmdutil.checkunfinished(repo)
1128 1128
1129 1129 # {newnode: (oldnode, olddiff, olddrev)}
1130 1130 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1131 1131
1132 1132 confirm = ui.configbool(b'phabsend', b'confirm')
1133 1133 confirm |= bool(opts.get(b'confirm'))
1134 1134 if confirm:
1135 1135 confirmed = _confirmbeforesend(repo, revs, oldmap)
1136 1136 if not confirmed:
1137 1137 raise error.Abort(_(b'phabsend cancelled'))
1138 1138
1139 1139 actions = []
1140 1140 reviewers = opts.get(b'reviewer', [])
1141 1141 blockers = opts.get(b'blocker', [])
1142 1142 phids = []
1143 1143 if reviewers:
1144 1144 phids.extend(userphids(repo.ui, reviewers))
1145 1145 if blockers:
1146 1146 phids.extend(
1147 1147 map(
1148 1148 lambda phid: b'blocking(%s)' % phid,
1149 1149 userphids(repo.ui, blockers),
1150 1150 )
1151 1151 )
1152 1152 if phids:
1153 1153 actions.append({b'type': b'reviewers.add', b'value': phids})
1154 1154
1155 1155 drevids = [] # [int]
1156 1156 diffmap = {} # {newnode: diff}
1157 1157
1158 1158 # Send patches one by one so we know their Differential Revision PHIDs and
1159 1159 # can provide the dependency relationship
1160 1160 lastrevphid = None
1161 1161 for rev in revs:
1162 1162 ui.debug(b'sending rev %d\n' % rev)
1163 1163 ctx = repo[rev]
1164 1164
1165 1165 # Get Differential Revision ID
1166 1166 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1167 1167 if oldnode != ctx.node() or opts.get(b'amend'):
1168 1168 # Create or update Differential Revision
1169 1169 revision, diff = createdifferentialrevision(
1170 1170 ctx,
1171 1171 revid,
1172 1172 lastrevphid,
1173 1173 oldnode,
1174 1174 olddiff,
1175 1175 actions,
1176 1176 opts.get(b'comment'),
1177 1177 )
1178 1178 diffmap[ctx.node()] = diff
1179 1179 newrevid = int(revision[b'object'][b'id'])
1180 1180 newrevphid = revision[b'object'][b'phid']
1181 1181 if revid:
1182 1182 action = b'updated'
1183 1183 else:
1184 1184 action = b'created'
1185 1185
1186 1186 # Create a local tag to note the association, if the commit message
1187 1187 # does not have it already
1188 1188 m = _differentialrevisiondescre.search(ctx.description())
1189 1189 if not m or int(m.group('id')) != newrevid:
1190 1190 tagname = b'D%d' % newrevid
1191 1191 tags.tag(
1192 1192 repo,
1193 1193 tagname,
1194 1194 ctx.node(),
1195 1195 message=None,
1196 1196 user=None,
1197 1197 date=None,
1198 1198 local=True,
1199 1199 )
1200 1200 else:
1201 1201 # Nothing changed. But still set "newrevphid" so the next revision
1202 1202 # can depend on this one, and "newrevid" for the summary line.
1203 1203 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1204 1204 newrevid = revid
1205 1205 action = b'skipped'
1206 1206
1207 1207 actiondesc = ui.label(
1208 1208 {
1209 1209 b'created': _(b'created'),
1210 1210 b'skipped': _(b'skipped'),
1211 1211 b'updated': _(b'updated'),
1212 1212 }[action],
1213 1213 b'phabricator.action.%s' % action,
1214 1214 )
1215 1215 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1216 1216 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1217 1217 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1218 1218 ui.write(
1219 1219 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
1220 1220 )
1221 1221 drevids.append(newrevid)
1222 1222 lastrevphid = newrevphid
1223 1223
1224 1224 # Update commit messages and remove tags
1225 1225 if opts.get(b'amend'):
1226 1226 unfi = repo.unfiltered()
1227 1227 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1228 1228 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1229 1229 wnode = unfi[b'.'].node()
1230 1230 mapping = {} # {oldnode: [newnode]}
1231 1231 for i, rev in enumerate(revs):
1232 1232 old = unfi[rev]
1233 1233 drevid = drevids[i]
1234 1234 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1235 1235 newdesc = getdescfromdrev(drev)
1236 1236 # Make sure the commit message contains "Differential Revision"
1237 1237 if old.description() != newdesc:
1238 1238 if old.phase() == phases.public:
1239 1239 ui.warn(
1240 1240 _(b"warning: not updating public commit %s\n")
1241 1241 % scmutil.formatchangeid(old)
1242 1242 )
1243 1243 continue
1244 1244 parents = [
1245 1245 mapping.get(old.p1().node(), (old.p1(),))[0],
1246 1246 mapping.get(old.p2().node(), (old.p2(),))[0],
1247 1247 ]
1248 1248 new = context.metadataonlyctx(
1249 1249 repo,
1250 1250 old,
1251 1251 parents=parents,
1252 1252 text=newdesc,
1253 1253 user=old.user(),
1254 1254 date=old.date(),
1255 1255 extra=old.extra(),
1256 1256 )
1257 1257
1258 1258 newnode = new.commit()
1259 1259
1260 1260 mapping[old.node()] = [newnode]
1261 1261 # Update diff property
1262 1262 # If it fails, just warn and keep going; otherwise the DREV
1263 1263 # associations will be lost
1264 1264 try:
1265 1265 writediffproperties(unfi[newnode], diffmap[old.node()])
1266 1266 except util.urlerr.urlerror:
1267 1267 ui.warnnoi18n(
1268 1268 b'Failed to update metadata for D%d\n' % drevid
1269 1269 )
1270 1270 # Remove the local tag since it's no longer necessary
1271 1271 tagname = b'D%d' % drevid
1272 1272 if tagname in repo.tags():
1273 1273 tags.tag(
1274 1274 repo,
1275 1275 tagname,
1276 1276 nullid,
1277 1277 message=None,
1278 1278 user=None,
1279 1279 date=None,
1280 1280 local=True,
1281 1281 )
1282 1282 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1283 1283 if wnode in mapping:
1284 1284 unfi.setparents(mapping[wnode][0])
1285 1285
1286 1286
1287 1287 # Map from "hg:meta" keys to headers understood by "hg import". The order is
1288 1288 # consistent with "hg export" output.
1289 1289 _metanamemap = util.sortdict(
1290 1290 [
1291 1291 (b'user', b'User'),
1292 1292 (b'date', b'Date'),
1293 1293 (b'branch', b'Branch'),
1294 1294 (b'node', b'Node ID'),
1295 1295 (b'parent', b'Parent '),
1296 1296 ]
1297 1297 )
1298 1298
1299 1299
1300 1300 def _confirmbeforesend(repo, revs, oldmap):
1301 1301 url, token = readurltoken(repo.ui)
1302 1302 ui = repo.ui
1303 1303 for rev in revs:
1304 1304 ctx = repo[rev]
1305 1305 desc = ctx.description().splitlines()[0]
1306 1306 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1307 1307 if drevid:
1308 1308 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1309 1309 else:
1310 1310 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1311 1311
1312 1312 ui.write(
1313 1313 _(b'%s - %s: %s\n')
1314 1314 % (
1315 1315 drevdesc,
1316 1316 ui.label(bytes(ctx), b'phabricator.node'),
1317 1317 ui.label(desc, b'phabricator.desc'),
1318 1318 )
1319 1319 )
1320 1320
1321 1321 if ui.promptchoice(
1322 1322 _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
1323 1323 ):
1324 1324 return False
1325 1325
1326 1326 return True
1327 1327
1328 1328
1329 1329 _knownstatusnames = {
1330 1330 b'accepted',
1331 1331 b'needsreview',
1332 1332 b'needsrevision',
1333 1333 b'closed',
1334 1334 b'abandoned',
1335 1335 b'changesplanned',
1336 1336 }
1337 1337
1338 1338
1339 1339 def _getstatusname(drev):
1340 1340 """get normalized status name from a Differential Revision"""
1341 1341 return drev[b'statusName'].replace(b' ', b'').lower()
1342 1342
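# Worked example: a drev whose b'statusName' is "Needs Review" (as in the
# querydrev() sample below) normalizes to a _knownstatusnames entry:
#
#   _getstatusname({b'statusName': b'Needs Review'})  ->  b'needsreview'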
1343 1343
1344 1344 # Small language to specify differential revisions. Supported symbols: (), :X,
1345 1345 # +, -, and &.
1346 1346
1347 1347 _elements = {
1348 1348 # token-type: binding-strength, primary, prefix, infix, suffix
1349 1349 b'(': (12, None, (b'group', 1, b')'), None, None),
1350 1350 b':': (8, None, (b'ancestors', 8), None, None),
1351 1351 b'&': (5, None, None, (b'and_', 5), None),
1352 1352 b'+': (4, None, None, (b'add', 4), None),
1353 1353 b'-': (4, None, None, (b'sub', 4), None),
1354 1354 b')': (0, None, None, None, None),
1355 1355 b'symbol': (0, b'symbol', None, None, None),
1356 1356 b'end': (0, None, None, None, None),
1357 1357 }
1358 1358
1359 1359
1360 1360 def _tokenize(text):
1361 1361 view = memoryview(text) # zero-copy slice
1362 1362 special = b'():+-& '
1363 1363 pos = 0
1364 1364 length = len(text)
1365 1365 while pos < length:
1366 1366 symbol = b''.join(
1367 1367 itertools.takewhile(
1368 1368 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1369 1369 )
1370 1370 )
1371 1371 if symbol:
1372 1372 yield (b'symbol', symbol, pos)
1373 1373 pos += len(symbol)
1374 1374 else: # special char, ignore space
1375 1375 if text[pos : pos + 1] != b' ':
1376 1376 yield (text[pos : pos + 1], None, pos)
1377 1377 pos += 1
1378 1378 yield (b'end', None, pos)
1379 1379
1380 1380
1381 1381 def _parse(text):
1382 1382 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1383 1383 if pos != len(text):
1384 1384 raise error.ParseError(b'invalid token', pos)
1385 1385 return tree
1386 1386
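# Worked example of the DREVSPEC language above (the parse-tree shape is
# inferred from mercurial.parser semantics, shown here as an assumption):
#
#   list(_tokenize(b':D6+8'))
#   -> [(b':', None, 0), (b'symbol', b'D6', 1), (b'+', None, 3),
#       (b'symbol', b'8', 4), (b'end', None, 5)]
#   _parse(b':D6+8')
#   -> (b'add', (b'ancestors', (b'symbol', b'D6')), (b'symbol', b'8'))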
1387 1387
1388 1388 def _parsedrev(symbol):
1389 1389 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1390 1390 if symbol.startswith(b'D') and symbol[1:].isdigit():
1391 1391 return int(symbol[1:])
1392 1392 if symbol.isdigit():
1393 1393 return int(symbol)
1394 1394
1395 1395
1396 1396 def _prefetchdrevs(tree):
1397 1397 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1398 1398 drevs = set()
1399 1399 ancestordrevs = set()
1400 1400 op = tree[0]
1401 1401 if op == b'symbol':
1402 1402 r = _parsedrev(tree[1])
1403 1403 if r:
1404 1404 drevs.add(r)
1405 1405 elif op == b'ancestors':
1406 1406 r, a = _prefetchdrevs(tree[1])
1407 1407 drevs.update(r)
1408 1408 ancestordrevs.update(r)
1409 1409 ancestordrevs.update(a)
1410 1410 else:
1411 1411 for t in tree[1:]:
1412 1412 r, a = _prefetchdrevs(t)
1413 1413 drevs.update(r)
1414 1414 ancestordrevs.update(a)
1415 1415 return drevs, ancestordrevs
1416 1416
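# Continuing the example above, _prefetchdrevs(_parse(b':D6+8')) returns
# ({6, 8}, {6}): both D6 and D8 are fetched individually, and D6 additionally
# seeds the ancestor-stack prefetch performed in querydrev() below.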
1417 1417
1418 1418 def querydrev(ui, spec):
1419 1419 """return a list of "Differential Revision" dicts
1420 1420
1421 1421 spec is a string using a simple query language, see docstring in phabread
1422 1422 for details.
1423 1423
1424 1424 A "Differential Revision dict" looks like:
1425 1425
1426 1426 {
1427 1427 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1428 1428 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1429 1429 "auxiliary": {
1430 1430 "phabricator:depends-on": [
1431 1431 "PHID-DREV-gbapp366kutjebt7agcd"
1432 1432 ]
1433 1433 "phabricator:projects": [],
1434 1434 },
1435 1435 "branch": "default",
1436 1436 "ccs": [],
1437 1437 "commits": [],
1438 1438 "dateCreated": "1499181406",
1439 1439 "dateModified": "1499182103",
1440 1440 "diffs": [
1441 1441 "3",
1442 1442 "4",
1443 1443 ],
1444 1444 "hashes": [],
1445 1445 "id": "2",
1446 1446 "lineCount": "2",
1447 1447 "phid": "PHID-DREV-672qvysjcczopag46qty",
1448 1448 "properties": {},
1449 1449 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1450 1450 "reviewers": [],
1451 1451 "sourcePath": null
1452 1452 "status": "0",
1453 1453 "statusName": "Needs Review",
1454 1454 "summary": "",
1455 1455 "testPlan": "",
1456 1456 "title": "example",
1457 1457 "uri": "https://phab.example.com/D2",
1458 1458 }
1459 1459 """
1460 1460 # TODO: replace differential.query and differential.querydiffs with
1461 1461 # differential.diff.search because the former (and their output) are
1462 1462 # frozen, and planned to be deprecated and removed.
1463 1463
1464 1464 def fetch(params):
1465 1465 """params -> single drev or None"""
1466 1466 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1467 1467 if key in prefetched:
1468 1468 return prefetched[key]
1469 1469 drevs = callconduit(ui, b'differential.query', params)
1470 1470 # Fill prefetched with the result
1471 1471 for drev in drevs:
1472 1472 prefetched[drev[b'phid']] = drev
1473 1473 prefetched[int(drev[b'id'])] = drev
1474 1474 if key not in prefetched:
1475 1475 raise error.Abort(
1476 1476 _(b'cannot get Differential Revision %r') % params
1477 1477 )
1478 1478 return prefetched[key]
1479 1479
1480 1480 def getstack(topdrevids):
1481 1481 """given a top, get a stack from the bottom, [id] -> [id]"""
1482 1482 visited = set()
1483 1483 result = []
1484 1484 queue = [{b'ids': [i]} for i in topdrevids]
1485 1485 while queue:
1486 1486 params = queue.pop()
1487 1487 drev = fetch(params)
1488 1488 if drev[b'id'] in visited:
1489 1489 continue
1490 1490 visited.add(drev[b'id'])
1491 1491 result.append(int(drev[b'id']))
1492 1492 auxiliary = drev.get(b'auxiliary', {})
1493 1493 depends = auxiliary.get(b'phabricator:depends-on', [])
1494 1494 for phid in depends:
1495 1495 queue.append({b'phids': [phid]})
1496 1496 result.reverse()
1497 1497 return smartset.baseset(result)
1498 1498
1499 1499 # Initialize prefetch cache
1500 1500 prefetched = {} # {id or phid: drev}
1501 1501
1502 1502 tree = _parse(spec)
1503 1503 drevs, ancestordrevs = _prefetchdrevs(tree)
1504 1504
1505 1505 # developer config: phabricator.batchsize
1506 1506 batchsize = ui.configint(b'phabricator', b'batchsize')
1507 1507
1508 1508 # Prefetch Differential Revisions in batch
1509 1509 tofetch = set(drevs)
1510 1510 for r in ancestordrevs:
1511 1511 tofetch.update(range(max(1, r - batchsize), r + 1))
1512 1512 if drevs:
1513 1513 fetch({b'ids': list(tofetch)})
1514 1514 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1515 1515
1516 1516 # Walk through the tree, return smartsets
1517 1517 def walk(tree):
1518 1518 op = tree[0]
1519 1519 if op == b'symbol':
1520 1520 drev = _parsedrev(tree[1])
1521 1521 if drev:
1522 1522 return smartset.baseset([drev])
1523 1523 elif tree[1] in _knownstatusnames:
1524 1524 drevs = [
1525 1525 r
1526 1526 for r in validids
1527 1527 if _getstatusname(prefetched[r]) == tree[1]
1528 1528 ]
1529 1529 return smartset.baseset(drevs)
1530 1530 else:
1531 1531 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1532 1532 elif op in {b'and_', b'add', b'sub'}:
1533 1533 assert len(tree) == 3
1534 1534 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1535 1535 elif op == b'group':
1536 1536 return walk(tree[1])
1537 1537 elif op == b'ancestors':
1538 1538 return getstack(walk(tree[1]))
1539 1539 else:
1540 1540 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1541 1541
1542 1542 return [prefetched[r] for r in walk(tree)]
1543 1543
1544 1544
1545 1545 def getdescfromdrev(drev):
1546 1546 """get description (commit message) from "Differential Revision"
1547 1547
1548 1548 This is similar to the differential.getcommitmessage API, but we only care
1549 1549 about a limited set of fields: title, summary, test plan, and URL.
1550 1550 """
1551 1551 title = drev[b'title']
1552 1552 summary = drev[b'summary'].rstrip()
1553 1553 testplan = drev[b'testPlan'].rstrip()
1554 1554 if testplan:
1555 1555 testplan = b'Test Plan:\n%s' % testplan
1556 1556 uri = b'Differential Revision: %s' % drev[b'uri']
1557 1557 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1558 1558
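# For the sample drev shown in the querydrev() docstring above (title
# "example", empty summary and test plan, uri https://phab.example.com/D2),
# the assembled message is:
#
#   example
#
#   Differential Revision: https://phab.example.com/D2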
1559 1559
1560 1560 def getdiffmeta(diff):
1561 1561 """get commit metadata (date, node, user, p1) from a diff object
1562 1562
1563 1563 The metadata could be "hg:meta", sent by phabsend, like:
1564 1564
1565 1565 "properties": {
1566 1566 "hg:meta": {
1567 1567 "branch": "default",
1568 1568 "date": "1499571514 25200",
1569 1569 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1570 1570 "user": "Foo Bar <foo@example.com>",
1571 1571 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1572 1572 }
1573 1573 }
1574 1574
1575 1575 Or converted from "local:commits", sent by "arc", like:
1576 1576
1577 1577 "properties": {
1578 1578 "local:commits": {
1579 1579 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1580 1580 "author": "Foo Bar",
1581 1581 "authorEmail": "foo@example.com"
1582 1582 "branch": "default",
1583 1583 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1584 1584 "local": "1000",
1585 1585 "message": "...",
1586 1586 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1587 1587 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1588 1588 "summary": "...",
1589 1589 "tag": "",
1590 1590 "time": 1499546314,
1591 1591 }
1592 1592 }
1593 1593 }
1594 1594
1595 1595 Note: metadata extracted from "local:commits" will lose time zone
1596 1596 information.
1597 1597 """
1598 1598 props = diff.get(b'properties') or {}
1599 1599 meta = props.get(b'hg:meta')
1600 1600 if not meta:
1601 1601 if props.get(b'local:commits'):
1602 1602 commit = sorted(props[b'local:commits'].values())[0]
1603 1603 meta = {}
1604 1604 if b'author' in commit and b'authorEmail' in commit:
1605 1605 meta[b'user'] = b'%s <%s>' % (
1606 1606 commit[b'author'],
1607 1607 commit[b'authorEmail'],
1608 1608 )
1609 1609 if b'time' in commit:
1610 1610 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1611 1611 if b'branch' in commit:
1612 1612 meta[b'branch'] = commit[b'branch']
1613 1613 node = commit.get(b'commit', commit.get(b'rev'))
1614 1614 if node:
1615 1615 meta[b'node'] = node
1616 1616 if len(commit.get(b'parents', ())) >= 1:
1617 1617 meta[b'parent'] = commit[b'parents'][0]
1618 1618 else:
1619 1619 meta = {}
1620 1620 if b'date' not in meta and b'dateCreated' in diff:
1621 1621 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1622 1622 if b'branch' not in meta and diff.get(b'branch'):
1623 1623 meta[b'branch'] = diff[b'branch']
1624 1624 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1625 1625 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1626 1626 return meta
1627 1627
1628 1628
1629 1629 def readpatch(ui, drevs, write):
1630 1630 """generate plain-text patch readable by 'hg import'
1631 1631
1632 1632 write takes a list of (DREV, bytes), where DREV is the differential number
1633 1633 (as bytes, without the "D" prefix) and the bytes are the text of a patch
1634 1634 to be imported. drevs is what "querydrev" returns, results of
1635 1635 "differential.query".
1636 1636 """
1637 1637 # Prefetch hg:meta property for all diffs
1638 1638 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
1639 1639 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
1640 1640
1641 1641 patches = []
1642 1642
1643 1643 # Generate patch for each drev
1644 1644 for drev in drevs:
1645 1645 ui.note(_(b'reading D%s\n') % drev[b'id'])
1646 1646
1647 1647 diffid = max(int(v) for v in drev[b'diffs'])
1648 1648 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
1649 1649 desc = getdescfromdrev(drev)
1650 1650 header = b'# HG changeset patch\n'
1651 1651
1652 1652 # Try to preserve metadata from hg:meta property. Write hg patch
1653 1653 # headers that can be read by the "import" command. See patchheadermap
1654 1654 # and extract in mercurial/patch.py for supported headers.
1655 1655 meta = getdiffmeta(diffs[b'%d' % diffid])
1656 1656 for k in _metanamemap.keys():
1657 1657 if k in meta:
1658 1658 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
1659 1659
1660 1660 content = b'%s%s\n%s' % (header, desc, body)
1661 1661 patches.append((drev[b'id'], content))
1662 1662
1663 1663 # Write patches to the supplied callback
1664 1664 write(patches)
1665 1665
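# For a diff carrying the full hg:meta sample shown in getdiffmeta() above,
# the generated patch starts with headers like (values reused from that
# docstring):
#
#   # HG changeset patch
#   # User Foo Bar <foo@example.com>
#   # Date 1499571514 25200
#   # Branch default
#   # Node ID 98c08acae292b2faf60a279b4189beb6cff1414d
#   # Parent  6d0abad76b30e4724a37ab8721d630394070fe16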
1666 1666
1667 1667 @vcrcommand(
1668 1668 b'phabread',
1669 1669 [(b'', b'stack', False, _(b'read dependencies'))],
1670 1670 _(b'DREVSPEC [OPTIONS]'),
1671 1671 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1672 1672 optionalrepo=True,
1673 1673 )
1674 1674 def phabread(ui, repo, spec, **opts):
1675 1675 """print patches from Phabricator suitable for importing
1676 1676
1677 1677 DREVSPEC can be a Differential Revision identifier, like ``D123``, or just
1678 1678 the number ``123``. It can also use common operators like ``+``, ``-``,
1679 1679 ``&``, ``(``, ``)`` for complex queries. The ``:`` prefix can be used to
1680 1680 select a stack.
1681 1681
1682 1682 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
1683 1683 can be used to filter patches by status. For performance reasons, they only
1684 1684 filter revisions selected by other criteria and cannot be used alone.
1685 1685
1686 1686 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, excluding
1687 1687 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
1688 1688 stack up to D9.
1689 1689
1690 1690 If --stack is given, dependency information is followed and all patches in
1691 1691 the stack are read. It is equivalent to the ``:`` operator.
1692 1692 """
1693 1693 opts = pycompat.byteskwargs(opts)
1694 1694 if opts.get(b'stack'):
1695 1695 spec = b':(%s)' % spec
1696 1696 drevs = querydrev(ui, spec)
1697 1697
1698 1698 def _write(patches):
1699 1699 for drev, content in patches:
1700 1700 ui.write(content)
1701 1701
1702 1702 readpatch(ui, drevs, _write)
1703 1703
1704 1704
1705 1705 @vcrcommand(
1706 b'phabimport',
1707 [(b'', b'stack', False, _(b'import dependencies as well'))],
1708 _(b'DREVSPEC [OPTIONS]'),
1709 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1710 )
1711 def phabimport(ui, repo, spec, **opts):
1712 """import patches from Phabricator for the specified Differential Revisions
1713
1714 The patches are read and applied starting at the parent of the working
1715 directory.
1716
1717 See ``hg help phabread`` for how to specify DREVSPEC.
1718 """
1719 opts = pycompat.byteskwargs(opts)
1720
1721 # --bypass avoids losing exec and symlink bits when importing on Windows,
1722 # and allows importing with a dirty wdir. It also aborts instead of leaving
1723 # rejects.
1724 opts[b'bypass'] = True
1725
1726 # Mandatory default values, synced with commands.import
1727 opts[b'strip'] = 1
1728 opts[b'prefix'] = b''
1729 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
1730 opts[b'obsolete'] = False
1731
1732 def _write(patches):
1733 parents = repo[None].parents()
1734
1735 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
1736 for drev, contents in patches:
1737 ui.status(_(b'applying patch from D%s\n') % drev)
1738
1739 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
1740 msg, node, rej = cmdutil.tryimportone(
1741 ui,
1742 repo,
1743 patchdata,
1744 parents,
1745 opts,
1746 [],
1747 None, # Never update wdir to another revision
1748 )
1749
1750 if not node:
1751 raise error.Abort(_(b'D%s: no diffs found') % drev)
1752
1753 ui.note(msg + b'\n')
1754 parents = [repo[node]]
1755
1756 opts = pycompat.byteskwargs(opts)
1757 if opts.get(b'stack'):
1758 spec = b':(%s)' % spec
1759 drevs = querydrev(repo.ui, spec)
1760
1761 readpatch(repo.ui, drevs, _write)
1762
1763
1764 @vcrcommand(
1706 1765 b'phabupdate',
1707 1766 [
1708 1767 (b'', b'accept', False, _(b'accept revisions')),
1709 1768 (b'', b'reject', False, _(b'reject revisions')),
1710 1769 (b'', b'abandon', False, _(b'abandon revisions')),
1711 1770 (b'', b'reclaim', False, _(b'reclaim revisions')),
1712 1771 (b'm', b'comment', b'', _(b'comment on the last revision')),
1713 1772 ],
1714 1773 _(b'DREVSPEC [OPTIONS]'),
1715 1774 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1716 1775 optionalrepo=True,
1717 1776 )
1718 1777 def phabupdate(ui, repo, spec, **opts):
1719 1778 """update Differential Revision in batch
1720 1779
1721 1780 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
1722 1781 """
1723 1782 opts = pycompat.byteskwargs(opts)
1724 1783 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
1725 1784 if len(flags) > 1:
1726 1785 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
1727 1786
1728 1787 actions = []
1729 1788 for f in flags:
1730 1789 actions.append({b'type': f, b'value': True})
1731 1790
1732 1791 drevs = querydrev(ui, spec)
1733 1792 for i, drev in enumerate(drevs):
1734 1793 if i + 1 == len(drevs) and opts.get(b'comment'):
1735 1794 actions.append({b'type': b'comment', b'value': opts[b'comment']})
1736 1795 if actions:
1737 1796 params = {
1738 1797 b'objectIdentifier': drev[b'phid'],
1739 1798 b'transactions': actions,
1740 1799 }
1741 1800 callconduit(ui, b'differential.revision.edit', params)
1742 1801
1743 1802
1744 1803 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
1745 1804 def template_review(context, mapping):
1746 1805 """:phabreview: Object describing the review for this changeset.
1747 1806 Has attributes `url` and `id`.
1748 1807 """
1749 1808 ctx = context.resource(mapping, b'ctx')
1750 1809 m = _differentialrevisiondescre.search(ctx.description())
1751 1810 if m:
1752 1811 return templateutil.hybriddict(
1753 1812 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
1754 1813 )
1755 1814 else:
1756 1815 tags = ctx.repo().nodetags(ctx.node())
1757 1816 for t in tags:
1758 1817 if _differentialrevisiontagre.match(t):
1759 1818 url = ctx.repo().ui.config(b'phabricator', b'url')
1760 1819 if not url.endswith(b'/'):
1761 1820 url += b'/'
1762 1821 url += t
1763 1822
1764 1823 return templateutil.hybriddict({b'url': url, b'id': t,})
1765 1824 return None
1766 1825
1767 1826
1768 1827 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
1769 1828 def template_status(context, mapping):
1770 1829 """:phabstatus: String. Status of Phabricator differential.
1771 1830 """
1772 1831 ctx = context.resource(mapping, b'ctx')
1773 1832 repo = context.resource(mapping, b'repo')
1774 1833 ui = context.resource(mapping, b'ui')
1775 1834
1776 1835 rev = ctx.rev()
1777 1836 try:
1778 1837 drevid = getdrevmap(repo, [rev])[rev]
1779 1838 except KeyError:
1780 1839 return None
1781 1840 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
1782 1841 for drev in drevs:
1783 1842 if int(drev[b'id']) == drevid:
1784 1843 return templateutil.hybriddict(
1785 1844 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
1786 1845 )
1787 1846 return None
1788 1847
1789 1848
1790 1849 @show.showview(b'phabstatus', csettopic=b'work')
1791 1850 def phabstatusshowview(ui, repo, displayer):
1792 1851 """Phabricator differiential status"""
1793 1852 revs = repo.revs('sort(_underway(), topo)')
1794 1853 drevmap = getdrevmap(repo, revs)
1795 1854 unknownrevs, drevids, revsbydrevid = [], set(), {}
1796 1855 for rev, drevid in pycompat.iteritems(drevmap):
1797 1856 if drevid is not None:
1798 1857 drevids.add(drevid)
1799 1858 revsbydrevid.setdefault(drevid, set()).add(rev)
1800 1859 else:
1801 1860 unknownrevs.append(rev)
1802 1861
1803 1862 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
1804 1863 drevsbyrev = {}
1805 1864 for drev in drevs:
1806 1865 for rev in revsbydrevid[int(drev[b'id'])]:
1807 1866 drevsbyrev[rev] = drev
1808 1867
1809 1868 def phabstatus(ctx):
1810 1869 drev = drevsbyrev[ctx.rev()]
1811 1870 status = ui.label(
1812 1871 b'%(statusName)s' % drev,
1813 1872 b'phabricator.status.%s' % _getstatusname(drev),
1814 1873 )
1815 1874 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
1816 1875
1817 1876 revs -= smartset.baseset(unknownrevs)
1818 1877 revdag = graphmod.dagwalker(repo, revs)
1819 1878
1820 1879 ui.setconfig(b'experimental', b'graphshorten', True)
1821 1880 displayer._exthook = phabstatus
1822 1881 nodelen = show.longestshortest(repo, revs)
1823 1882 logcmdutil.displaygraph(
1824 1883 ui,
1825 1884 repo,
1826 1885 revdag,
1827 1886 displayer,
1828 1887 graphmod.asciiedges,
1829 1888 props={b'nodelen': nodelen},
1830 1889 )
@@ -1,326 +1,349 b''
1 1 #require vcr
2 2 $ cat >> $HGRCPATH <<EOF
3 3 > [extensions]
4 4 > phabricator =
5 5 > EOF
6 6 $ hg init repo
7 7 $ cd repo
8 8 $ cat >> .hg/hgrc <<EOF
9 9 > [phabricator]
10 10 > url = https://phab.mercurial-scm.org/
11 11 > callsign = HG
12 12 >
13 13 > [auth]
14 14 > hgphab.schemes = https
15 15 > hgphab.prefix = phab.mercurial-scm.org
16 16 > # When working on the extension and making phabricator interaction
17 17 > # changes, edit this to be a real phabricator token. When done, edit
18 18 > # it back. The VCR transcripts will be auto-sanitised to replace your real
19 19 > # token with this value.
20 20 > hgphab.phabtoken = cli-hahayouwish
21 21 > EOF
22 22 $ VCR="$TESTDIR/phabricator"
23 23
24 24 Error is handled reasonably. We override the phabtoken here so that
25 25 when you're developing changes to phabricator.py you can edit the
26 26 above config and have a real token in the test but not have to edit
27 27 this test.
28 28 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
29 29 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
30 30 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
31 31
32 32 Missing arguments print the command help
33 33
34 34 $ hg phabread
35 35 hg phabread: invalid arguments
36 36 hg phabread DREVSPEC [OPTIONS]
37 37
38 38 print patches from Phabricator suitable for importing
39 39
40 40 options:
41 41
42 42 --stack read dependencies
43 43
44 44 (use 'hg phabread -h' to show more help)
45 45 [255]
46 46
47 47 Basic phabread:
48 48 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
49 49 # HG changeset patch
50 50 # Date 1536771503 0
51 51 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
52 52 exchangev2: start to implement pull with wire protocol v2
53 53
54 54 Wire protocol version 2 will take a substantially different
55 55 approach to exchange than version 1 (at least as far as pulling
56 56 is concerned).
57 57
58 58 This commit establishes a new exchangev2 module for holding
59 59
60 60 phabupdate with an accept:
61 61 $ hg phabupdate --accept D4564 \
62 62 > -m 'I think I like where this is headed. Will read rest of series later.'\
63 63 > --test-vcr "$VCR/accept-4564.json"
64 64 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
65 65 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
66 66 [255]
67 67 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
68 68
69 69 Create a differential diff:
70 70 $ HGENCODING=utf-8; export HGENCODING
71 71 $ echo alpha > alpha
72 72 $ hg ci --addremove -m 'create alpha for phabricator test €'
73 73 adding alpha
74 74 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
75 75 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
76 76 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
77 77 $ echo more >> alpha
78 78 $ HGEDITOR=true hg ci --amend
79 79 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
80 80 $ echo beta > beta
81 81 $ hg ci --addremove -m 'create beta for phabricator test'
82 82 adding beta
83 83 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
84 84 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
85 85 D7916 - created - 9e6901f21d5b: create beta for phabricator test
86 86 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
87 87 $ unset HGENCODING
88 88
89 89 The amend won't explode after posting a public commit. The local tag is left
90 90 behind to identify it.
91 91
92 92 $ echo 'public change' > beta
93 93 $ hg ci -m 'create public change for phabricator testing'
94 94 $ hg phase --public .
95 95 $ echo 'draft change' > alpha
96 96 $ hg ci -m 'create draft change for phabricator testing'
97 97 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
98 98 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
99 99 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
100 100 warning: not updating public commit 2:7b4185ab5d16
101 101 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
102 102 $ hg tags -v
103 103 tip 3:3244dc4a3334
104 104 D7917 2:7b4185ab5d16 local
105 105
106 106 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
107 107 > {
108 108 > "constraints": {
109 109 > "isBot": true
110 110 > }
111 111 > }
112 112 > EOF
113 113 {
114 114 "cursor": {
115 115 "after": null,
116 116 "before": null,
117 117 "limit": 100,
118 118 "order": null
119 119 },
120 120 "data": [],
121 121 "maps": {},
122 122 "query": {
123 123 "queryKey": null
124 124 }
125 125 }
126 126
127 127 Template keywords
128 128 $ hg log -T'{rev} {phabreview|json}\n'
129 129 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
130 130 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
131 131 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
132 132 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
133 133
134 134 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
135 135 3 https://phab.mercurial-scm.org/D7918 D7918
136 136 2 https://phab.mercurial-scm.org/D7917 D7917
137 137 1 https://phab.mercurial-scm.org/D7916 D7916
138 138 0 https://phab.mercurial-scm.org/D7915 D7915
139 139
140 140 Commenting when phabsending:
141 141 $ echo comment > comment
142 142 $ hg ci --addremove -m "create comment for phabricator test"
143 143 adding comment
144 144 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
145 145 D7919 - created - d5dddca9023d: create comment for phabricator test
146 146 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
147 147 $ echo comment2 >> comment
148 148 $ hg ci --amend
149 149 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
150 150 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
151 151 D7919 - updated - 1849d7828727: create comment for phabricator test
152 152
153 153 Phabsending a skipped commit:
154 154 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
155 155 D7919 - skipped - 1849d7828727: create comment for phabricator test
156 156
157 157 Phabsending a new binary, a modified binary, and a removed binary
158 158
159 159 >>> open('bin', 'wb').write(b'\0a') and None
160 160 $ hg ci -Am 'add binary'
161 161 adding bin
162 162 >>> open('bin', 'wb').write(b'\0b') and None
163 163 $ hg ci -m 'modify binary'
164 164 $ hg rm bin
165 165 $ hg ci -m 'remove binary'
166 166 $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
167 167 uploading bin@aa24a81f55de
168 168 D8007 - created - aa24a81f55de: add binary
169 169 uploading bin@d8d62a881b54
170 170 D8008 - created - d8d62a881b54: modify binary
171 171 D8009 - created - af55645b2e29: remove binary
172 172 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg
173 173
174 174 Phabsend a renamed binary and a copied binary, with and without content changes
175 175 to src and dest
176 176
177 177 >>> open('bin2', 'wb').write(b'\0c') and None
178 178 $ hg ci -Am 'add another binary'
179 179 adding bin2
180 180
181 181 TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
182 182 looks much different than when viewing "bin2_moved". No idea if this is a phab
183 183 bug, or a phabsend bug. The patch (as printed by phabread) looks reasonable
184 184 though.
185 185
186 186 $ hg mv bin2 bin2_moved
187 187 $ hg ci -m "moved binary"
188 188
189 189 Note: "bin2_moved" is also not viewable in phabricator with this review
190 190
191 191 $ hg cp bin2_moved bin2_copied
192 192 $ hg ci -m "copied binary"
193 193
194 194 Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
195 195 are viewable in their proper state. "bin2_copied" is not viewable, and not
196 196 listed as binary in phabricator.
197 197
198 198 >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
199 199 $ hg mv bin2_copied bin2_moved_again
200 200 $ hg ci -m "move+mod copied binary"
201 201
202 202 Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both
203 203 viewable on each side.
204 204
205 205 >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
206 206 $ hg cp bin2_moved bin2_moved_copied
207 207 >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
208 208 $ hg ci -m "copy+mod moved binary"
209 209
210 210 $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
211 211 uploading bin2@f42f9195e00c
212 212 D8128 - created - f42f9195e00c: add another binary
213 213 D8129 - created - 834ab31d80ae: moved binary
214 214 D8130 - created - 494b750e5194: copied binary
215 215 uploading bin2_moved_again@25f766b50cc2
216 216 D8131 - created - 25f766b50cc2: move+mod copied binary
217 217 uploading bin2_moved_copied@1b87b363a5e4
218 218 uploading bin2_moved@1b87b363a5e4
219 219 D8132 - created - 1b87b363a5e4: copy+mod moved binary
220 220 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg
221 221
222 222 Phabreading a DREV with a local:commits time as a string:
223 223 $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
224 224 # HG changeset patch
225 225 # User Pulkit Goyal <7895pulkit@gmail.com>
226 226 # Date 1509404054 -19800
227 227 # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
228 228 # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
229 229 repoview: add a new attribute _visibilityexceptions and related API
230 230
231 231 Currently we don't have a defined way in core to make some hidden revisions
232 232 visible in filtered repo. Extensions to achieve the purpose of unhiding some
233 233 hidden commits, wrap repoview.pinnedrevs() function.
234 234
235 235 To make the above task simple and have well defined API, this patch adds a new
236 236 attribute '_visibilityexceptions' to repoview class which will contains
237 237 the hidden revs which should be exception.
238 238 This will allow to set different exceptions for different repoview objects
239 239 backed by the same unfiltered repo.
240 240
241 241 This patch also adds API to add revs to the attribute set and get them.
242 242
243 243 Thanks to Jun for suggesting the use of repoview class instead of localrepo.
244 244
245 245 Differential Revision: https://phab.mercurial-scm.org/D1285
246 246 diff --git a/mercurial/repoview.py b/mercurial/repoview.py
247 247 --- a/mercurial/repoview.py
248 248 +++ b/mercurial/repoview.py
249 249 @@ * @@ (glob)
250 250 subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
251 251 """
252 252
253 253 + # hidden revs which should be visible
254 254 + _visibilityexceptions = set()
255 255 +
256 256 def __init__(self, repo, filtername):
257 257 object.__setattr__(self, r'_unfilteredrepo', repo)
258 258 object.__setattr__(self, r'filtername', filtername)
259 259 @@ -231,6 +234,14 @@
260 260 return self
261 261 return self.unfiltered().filtered(name)
262 262
263 263 + def addvisibilityexceptions(self, revs):
264 264 + """adds hidden revs which should be visible to set of exceptions"""
265 265 + self._visibilityexceptions.update(revs)
266 266 +
267 267 + def getvisibilityexceptions(self):
268 268 + """returns the set of hidden revs which should be visible"""
269 269 + return self._visibilityexceptions
270 270 +
271 271 # everything access are forwarded to the proxied repo
272 272 def __getattr__(self, attr):
273 273 return getattr(self._unfilteredrepo, attr)
274 274 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
275 275 --- a/mercurial/localrepo.py
276 276 +++ b/mercurial/localrepo.py
277 277 @@ -570,6 +570,14 @@
278 278 def close(self):
279 279 self._writecaches()
280 280
281 281 + def addvisibilityexceptions(self, exceptions):
282 282 + # should be called on a filtered repository
283 283 + pass
284 284 +
285 285 + def getvisibilityexceptions(self):
286 286 + # should be called on a filtered repository
287 287 + return set()
288 288 +
289 289 def _loadextensions(self):
290 290 extensions.loadall(self.ui)
291 291
292 292
293 293 A bad .arcconfig doesn't error out
294 294 $ echo 'garbage' > .arcconfig
295 295 $ hg config phabricator --debug
296 296 invalid JSON in $TESTTMP/repo/.arcconfig
297 297 read config from: */.hgrc (glob)
298 298 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
299 299 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)
300 300
301 301 The .arcconfig content overrides global config
302 302 $ cat >> $HGRCPATH << EOF
303 303 > [phabricator]
304 304 > url = global
305 305 > callsign = global
306 306 > EOF
307 307 $ cp $TESTDIR/../.arcconfig .
308 308 $ mv .hg/hgrc .hg/hgrc.bak
309 309 $ hg config phabricator --debug
310 310 read config from: */.hgrc (glob)
311 311 $TESTTMP/repo/.arcconfig: phabricator.callsign=HG
312 312 $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/
313 313
314 314 But it doesn't override local config
315 315 $ cat >> .hg/hgrc << EOF
316 316 > [phabricator]
317 317 > url = local
318 318 > callsign = local
319 319 > EOF
320 320 $ hg config phabricator --debug
321 321 read config from: */.hgrc (glob)
322 322 $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
323 323 $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
324 324 $ mv .hg/hgrc.bak .hg/hgrc
325 325
326 Phabimport works with a stack
327
326 328 $ cd ..
329 $ hg clone repo repo2 -qr 1
330 $ cp repo/.hg/hgrc repo2/.hg/
331 $ cd repo2
332 $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
333 applying patch from D7917
334 applying patch from D7918
335 $ hg log -G -Tcompact
336 o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
337 | create draft change for phabricator testing
338 |
339 o 2 8de3712202d1 1970-01-01 00:00 +0000 test
340 | create public change for phabricator testing
341 |
342 @ 1 a692622e6937 1970-01-01 00:00 +0000 test
343 | create beta for phabricator test
344 |
345 o 0 c44b38f24a45 1970-01-01 00:00 +0000 test
346 create alpha for phabricator test \x80 (esc)
347
348
349 $ cd ..