Show More
@@ -0,0 +1,141 b'' | |||
|
1 | { | |
|
2 | "version": 1, | |
|
3 | "interactions": [ | |
|
4 | { | |
|
5 | "response": { | |
|
6 | "headers": { | |
|
7 | "x-xss-protection": [ | |
|
8 | "1; mode=block" | |
|
9 | ], | |
|
10 | "expires": [ | |
|
11 | "Sat, 01 Jan 2000 00:00:00 GMT" | |
|
12 | ], | |
|
13 | "server": [ | |
|
14 | "Apache/2.4.10 (Debian)" | |
|
15 | ], | |
|
16 | "date": [ | |
|
17 | "Wed, 15 Jul 2020 17:23:27 GMT" | |
|
18 | ], | |
|
19 | "cache-control": [ | |
|
20 | "no-store" | |
|
21 | ], | |
|
22 | "content-type": [ | |
|
23 | "application/json" | |
|
24 | ], | |
|
25 | "transfer-encoding": [ | |
|
26 | "chunked" | |
|
27 | ], | |
|
28 | "strict-transport-security": [ | |
|
29 | "max-age=0; includeSubdomains; preload" | |
|
30 | ], | |
|
31 | "x-frame-options": [ | |
|
32 | "Deny" | |
|
33 | ], | |
|
34 | "referrer-policy": [ | |
|
35 | "no-referrer" | |
|
36 | ], | |
|
37 | "x-content-type-options": [ | |
|
38 | "nosniff" | |
|
39 | ] | |
|
40 | }, | |
|
41 | "body": { | |
|
42 | "string": "{\"result\":[{\"id\":\"6876\",\"phid\":\"PHID-DREV-looitrxgt3omaau7a7qk\",\"title\":\"phabricator: support automatically obsoleting old revisions of pulled commits\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6876\",\"dateCreated\":\"1569388644\",\"dateModified\":\"1579887103\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"1\",\"statusName\":\"Needs Revision\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":68,\"lines.removed\":1},\"branch\":null,\"summary\":\"This is basically an import of the `pullcreatemarkers` extension[1] from the FB\\nrepo, with minor adjustments to `getmatchingdiff()` to work with modern hg.\\nSince this is very phabricator specific, it makes more sense to me to bundle it\\ninto the existing extension. It wasn't very obvious from the old name what\\nfunctionality was provided, and it may make sense to do this in other scenarios\\nbesides `hg pull`.\\n\\nThere are two use cases that I can see- first, ensuring that old revisions are\\ncleaned up for a contributor (I seem to recall something I submitted recently\\nneeded to be explicitly pruned, though most submissions do clean up\\nautomatically). Second, any `hg phabread | hg import -` would otherwise need to\\nbe manually cleaned up. The latter is annoying enough that I tend not to grab\\nthe code and try it when reviewing.\\n\\nIt is currently guarded by a config option (off by default), because @marmoute\\nexpressed concerns about duplicate marker creation if the pushing reviewer also\\ncreates a marker. I don't think that's possible here, since the obsolete\\nrevisions are explicitly excluded. But maybe there are other reasons someone\\nwouldn't want older revisions obsoleted. The config name reflects the fact that\\nI'm not sure if other things like import should get this too.\\n\\nI suspect that we could wrap a function deeper in the pull sequence to improve\\nboth the code and the UX. 
For example, when pulling an obsolete marker, it can\\nprint out a warning that the working directory parent is obsolete, but that\\ndoesn't happen here. (It won't happen with this test. It *should* without the\\n`--bypass` option, but doesn't.) It should also be possible to not have to\\nquery the range of new revisions, and maybe it can be added to the existing\\ntransaction.\\n\\n[1] https:\\/\\/bitbucket.org\\/facebook\\/hg-experimental\\/src\\/default\\/hgext3rd\\/pullcreatemarkers.py\",\"testPlan\":\"\",\"lineCount\":\"69\",\"activeDiffPHID\":\"PHID-DIFF-jdpqpzciqcooaxf2kojh\",\"diffs\":[\"16604\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-cah4b6i3kszy6debh3bl\":\"PHID-USER-cah4b6i3kszy6debh3bl\"},\"ccs\":[\"PHID-USER-34jnztnonbr4lhwuybwl\",\"PHID-USER-e66t6wbudjtigdnqbl3e\",\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\",\"PHID-USER-vflsibccj4unqydwfvne\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-2dbanvk64h5wguhxta2o\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}" | |
|
43 | }, | |
|
44 | "status": { | |
|
45 | "message": "OK", | |
|
46 | "code": 200 | |
|
47 | } | |
|
48 | }, | |
|
49 | "request": { | |
|
50 | "uri": "https://phab.mercurial-scm.org//api/differential.query", | |
|
51 | "body": "output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B6876%5D%7D&__conduit__=1", | |
|
52 | "method": "POST", | |
|
53 | "headers": { | |
|
54 | "content-length": [ | |
|
55 | "146" | |
|
56 | ], | |
|
57 | "accept": [ | |
|
58 | "application/mercurial-0.1" | |
|
59 | ], | |
|
60 | "content-type": [ | |
|
61 | "application/x-www-form-urlencoded" | |
|
62 | ], | |
|
63 | "user-agent": [ | |
|
64 | "mercurial/proto-1.0 (Mercurial 5.4.2+207-8403cc54bc83+20200709)" | |
|
65 | ], | |
|
66 | "host": [ | |
|
67 | "phab.mercurial-scm.org" | |
|
68 | ] | |
|
69 | } | |
|
70 | } | |
|
71 | }, | |
|
72 | { | |
|
73 | "response": { | |
|
74 | "headers": { | |
|
75 | "x-xss-protection": [ | |
|
76 | "1; mode=block" | |
|
77 | ], | |
|
78 | "expires": [ | |
|
79 | "Sat, 01 Jan 2000 00:00:00 GMT" | |
|
80 | ], | |
|
81 | "server": [ | |
|
82 | "Apache/2.4.10 (Debian)" | |
|
83 | ], | |
|
84 | "date": [ | |
|
85 | "Wed, 15 Jul 2020 17:23:28 GMT" | |
|
86 | ], | |
|
87 | "cache-control": [ | |
|
88 | "no-store" | |
|
89 | ], | |
|
90 | "content-type": [ | |
|
91 | "application/json" | |
|
92 | ], | |
|
93 | "transfer-encoding": [ | |
|
94 | "chunked" | |
|
95 | ], | |
|
96 | "strict-transport-security": [ | |
|
97 | "max-age=0; includeSubdomains; preload" | |
|
98 | ], | |
|
99 | "x-frame-options": [ | |
|
100 | "Deny" | |
|
101 | ], | |
|
102 | "referrer-policy": [ | |
|
103 | "no-referrer" | |
|
104 | ], | |
|
105 | "x-content-type-options": [ | |
|
106 | "nosniff" | |
|
107 | ] | |
|
108 | }, | |
|
109 | "body": { | |
|
110 | "string": "{\"result\":{\"object\":{\"id\":6876,\"phid\":\"PHID-DREV-looitrxgt3omaau7a7qk\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-g2xkgr2sjkrmhcr\"},{\"phid\":\"PHID-XACT-DREV-lgbrex6poz6x5pk\"}]},\"error_code\":null,\"error_info\":null}" | |
|
111 | }, | |
|
112 | "status": { | |
|
113 | "message": "OK", | |
|
114 | "code": 200 | |
|
115 | } | |
|
116 | }, | |
|
117 | "request": { | |
|
118 | "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit", | |
|
119 | "body": "output=json¶ms=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+%22PHID-DREV-looitrxgt3omaau7a7qk%22%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22plan-changes%22%2C+%22value%22%3A+true%7D%5D%7D&__conduit__=1", | |
|
120 | "method": "POST", | |
|
121 | "headers": { | |
|
122 | "content-length": [ | |
|
123 | "278" | |
|
124 | ], | |
|
125 | "accept": [ | |
|
126 | "application/mercurial-0.1" | |
|
127 | ], | |
|
128 | "content-type": [ | |
|
129 | "application/x-www-form-urlencoded" | |
|
130 | ], | |
|
131 | "user-agent": [ | |
|
132 | "mercurial/proto-1.0 (Mercurial 5.4.2+207-8403cc54bc83+20200709)" | |
|
133 | ], | |
|
134 | "host": [ | |
|
135 | "phab.mercurial-scm.org" | |
|
136 | ] | |
|
137 | } | |
|
138 | } | |
|
139 | } | |
|
140 | ] | |
|
141 | } No newline at end of file |
@@ -1,2285 +1,2293 b'' | |||
|
1 | 1 | # phabricator.py - simple Phabricator integration |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2017 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | """simple Phabricator integration (EXPERIMENTAL) |
|
8 | 8 | |
|
9 | 9 | This extension provides a ``phabsend`` command which sends a stack of |
|
10 | 10 | changesets to Phabricator, and a ``phabread`` command which prints a stack of |
|
11 | 11 | revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command |
|
12 | 12 | to update statuses in batch. |
|
13 | 13 | |
|
14 | 14 | A "phabstatus" view for :hg:`show` is also provided; it displays status |
|
15 | 15 | information of Phabricator differentials associated with unfinished |
|
16 | 16 | changesets. |
|
17 | 17 | |
|
18 | 18 | By default, Phabricator requires ``Test Plan`` which might prevent some |
|
19 | 19 | changeset from being sent. The requirement could be disabled by changing |
|
20 | 20 | ``differential.require-test-plan-field`` config server side. |
|
21 | 21 | |
|
22 | 22 | Config:: |
|
23 | 23 | |
|
24 | 24 | [phabricator] |
|
25 | 25 | # Phabricator URL |
|
26 | 26 | url = https://phab.example.com/ |
|
27 | 27 | |
|
28 | 28 | # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its |
|
29 | 29 | # callsign is "FOO". |
|
30 | 30 | callsign = FOO |
|
31 | 31 | |
|
32 | 32 | # curl command to use. If not set (default), use builtin HTTP library to |
|
33 | 33 | # communicate. If set, use the specified curl command. This could be useful |
|
34 | 34 | # if you need to specify advanced options that is not easily supported by |
|
35 | 35 | # the internal library. |
|
36 | 36 | curlcmd = curl --connect-timeout 2 --retry 3 --silent |
|
37 | 37 | |
|
38 | 38 | [auth] |
|
39 | 39 | example.schemes = https |
|
40 | 40 | example.prefix = phab.example.com |
|
41 | 41 | |
|
42 | 42 | # API token. Get it from https://$HOST/conduit/login/ |
|
43 | 43 | example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx |
|
44 | 44 | """ |
|
45 | 45 | |
|
46 | 46 | from __future__ import absolute_import |
|
47 | 47 | |
|
48 | 48 | import base64 |
|
49 | 49 | import contextlib |
|
50 | 50 | import hashlib |
|
51 | 51 | import itertools |
|
52 | 52 | import json |
|
53 | 53 | import mimetypes |
|
54 | 54 | import operator |
|
55 | 55 | import re |
|
56 | 56 | |
|
57 | 57 | from mercurial.node import bin, nullid, short |
|
58 | 58 | from mercurial.i18n import _ |
|
59 | 59 | from mercurial.pycompat import getattr |
|
60 | 60 | from mercurial.thirdparty import attr |
|
61 | 61 | from mercurial import ( |
|
62 | 62 | cmdutil, |
|
63 | 63 | context, |
|
64 | 64 | copies, |
|
65 | 65 | encoding, |
|
66 | 66 | error, |
|
67 | 67 | exthelper, |
|
68 | 68 | graphmod, |
|
69 | 69 | httpconnection as httpconnectionmod, |
|
70 | 70 | localrepo, |
|
71 | 71 | logcmdutil, |
|
72 | 72 | match, |
|
73 | 73 | mdiff, |
|
74 | 74 | obsutil, |
|
75 | 75 | parser, |
|
76 | 76 | patch, |
|
77 | 77 | phases, |
|
78 | 78 | pycompat, |
|
79 | 79 | scmutil, |
|
80 | 80 | smartset, |
|
81 | 81 | tags, |
|
82 | 82 | templatefilters, |
|
83 | 83 | templateutil, |
|
84 | 84 | url as urlmod, |
|
85 | 85 | util, |
|
86 | 86 | ) |
|
87 | 87 | from mercurial.utils import ( |
|
88 | 88 | procutil, |
|
89 | 89 | stringutil, |
|
90 | 90 | ) |
|
91 | 91 | from . import show |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
95 | 95 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
96 | 96 | # be specifying the version(s) of Mercurial they are tested with, or |
|
97 | 97 | # leave the attribute unspecified. |
|
98 | 98 | testedwith = b'ships-with-hg-core' |
|
99 | 99 | |
|
100 | 100 | eh = exthelper.exthelper() |
|
101 | 101 | |
|
102 | 102 | cmdtable = eh.cmdtable |
|
103 | 103 | command = eh.command |
|
104 | 104 | configtable = eh.configtable |
|
105 | 105 | templatekeyword = eh.templatekeyword |
|
106 | 106 | uisetup = eh.finaluisetup |
|
107 | 107 | |
|
108 | 108 | # developer config: phabricator.batchsize |
|
109 | 109 | eh.configitem( |
|
110 | 110 | b'phabricator', b'batchsize', default=12, |
|
111 | 111 | ) |
|
112 | 112 | eh.configitem( |
|
113 | 113 | b'phabricator', b'callsign', default=None, |
|
114 | 114 | ) |
|
115 | 115 | eh.configitem( |
|
116 | 116 | b'phabricator', b'curlcmd', default=None, |
|
117 | 117 | ) |
|
118 | 118 | # developer config: phabricator.debug |
|
119 | 119 | eh.configitem( |
|
120 | 120 | b'phabricator', b'debug', default=False, |
|
121 | 121 | ) |
|
122 | 122 | # developer config: phabricator.repophid |
|
123 | 123 | eh.configitem( |
|
124 | 124 | b'phabricator', b'repophid', default=None, |
|
125 | 125 | ) |
|
126 | 126 | eh.configitem( |
|
127 | 127 | b'phabricator', b'url', default=None, |
|
128 | 128 | ) |
|
129 | 129 | eh.configitem( |
|
130 | 130 | b'phabsend', b'confirm', default=False, |
|
131 | 131 | ) |
|
132 | 132 | eh.configitem( |
|
133 | 133 | b'phabimport', b'secret', default=False, |
|
134 | 134 | ) |
|
135 | 135 | eh.configitem( |
|
136 | 136 | b'phabimport', b'obsolete', default=False, |
|
137 | 137 | ) |
|
138 | 138 | |
|
139 | 139 | colortable = { |
|
140 | 140 | b'phabricator.action.created': b'green', |
|
141 | 141 | b'phabricator.action.skipped': b'magenta', |
|
142 | 142 | b'phabricator.action.updated': b'magenta', |
|
143 | 143 | b'phabricator.desc': b'', |
|
144 | 144 | b'phabricator.drev': b'bold', |
|
145 | 145 | b'phabricator.node': b'', |
|
146 | 146 | b'phabricator.status.abandoned': b'magenta dim', |
|
147 | 147 | b'phabricator.status.accepted': b'green bold', |
|
148 | 148 | b'phabricator.status.closed': b'green', |
|
149 | 149 | b'phabricator.status.needsreview': b'yellow', |
|
150 | 150 | b'phabricator.status.needsrevision': b'red', |
|
151 | 151 | b'phabricator.status.changesplanned': b'red', |
|
152 | 152 | } |
|
153 | 153 | |
|
154 | 154 | _VCR_FLAGS = [ |
|
155 | 155 | ( |
|
156 | 156 | b'', |
|
157 | 157 | b'test-vcr', |
|
158 | 158 | b'', |
|
159 | 159 | _( |
|
160 | 160 | b'Path to a vcr file. If nonexistent, will record a new vcr transcript' |
|
161 | 161 | b', otherwise will mock all http requests using the specified vcr file.' |
|
162 | 162 | b' (ADVANCED)' |
|
163 | 163 | ), |
|
164 | 164 | ), |
|
165 | 165 | ] |
|
166 | 166 | |
|
167 | 167 | |
|
168 | 168 | @eh.wrapfunction(localrepo, "loadhgrc") |
|
169 | 169 | def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements): |
|
170 | 170 | """Load ``.arcconfig`` content into a ui instance on repository open. |
|
171 | 171 | """ |
|
172 | 172 | result = False |
|
173 | 173 | arcconfig = {} |
|
174 | 174 | |
|
175 | 175 | try: |
|
176 | 176 | # json.loads only accepts bytes from 3.6+ |
|
177 | 177 | rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig")) |
|
178 | 178 | # json.loads only returns unicode strings |
|
179 | 179 | arcconfig = pycompat.rapply( |
|
180 | 180 | lambda x: encoding.unitolocal(x) |
|
181 | 181 | if isinstance(x, pycompat.unicode) |
|
182 | 182 | else x, |
|
183 | 183 | pycompat.json_loads(rawparams), |
|
184 | 184 | ) |
|
185 | 185 | |
|
186 | 186 | result = True |
|
187 | 187 | except ValueError: |
|
188 | 188 | ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig")) |
|
189 | 189 | except IOError: |
|
190 | 190 | pass |
|
191 | 191 | |
|
192 | 192 | cfg = util.sortdict() |
|
193 | 193 | |
|
194 | 194 | if b"repository.callsign" in arcconfig: |
|
195 | 195 | cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"] |
|
196 | 196 | |
|
197 | 197 | if b"phabricator.uri" in arcconfig: |
|
198 | 198 | cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"] |
|
199 | 199 | |
|
200 | 200 | if cfg: |
|
201 | 201 | ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig")) |
|
202 | 202 | |
|
203 | 203 | return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc |
|
204 | 204 | |
|
205 | 205 | |
|
206 | 206 | def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False): |
|
207 | 207 | fullflags = flags + _VCR_FLAGS |
|
208 | 208 | |
|
209 | 209 | def hgmatcher(r1, r2): |
|
210 | 210 | if r1.uri != r2.uri or r1.method != r2.method: |
|
211 | 211 | return False |
|
212 | 212 | r1params = util.urlreq.parseqs(r1.body) |
|
213 | 213 | r2params = util.urlreq.parseqs(r2.body) |
|
214 | 214 | for key in r1params: |
|
215 | 215 | if key not in r2params: |
|
216 | 216 | return False |
|
217 | 217 | value = r1params[key][0] |
|
218 | 218 | # we want to compare json payloads without worrying about ordering |
|
219 | 219 | if value.startswith(b'{') and value.endswith(b'}'): |
|
220 | 220 | r1json = pycompat.json_loads(value) |
|
221 | 221 | r2json = pycompat.json_loads(r2params[key][0]) |
|
222 | 222 | if r1json != r2json: |
|
223 | 223 | return False |
|
224 | 224 | elif r2params[key][0] != value: |
|
225 | 225 | return False |
|
226 | 226 | return True |
|
227 | 227 | |
|
228 | 228 | def sanitiserequest(request): |
|
229 | 229 | request.body = re.sub( |
|
230 | 230 | br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body |
|
231 | 231 | ) |
|
232 | 232 | return request |
|
233 | 233 | |
|
234 | 234 | def sanitiseresponse(response): |
|
235 | 235 | if 'set-cookie' in response['headers']: |
|
236 | 236 | del response['headers']['set-cookie'] |
|
237 | 237 | return response |
|
238 | 238 | |
|
239 | 239 | def decorate(fn): |
|
240 | 240 | def inner(*args, **kwargs): |
|
241 | 241 | if kwargs.get('test_vcr'): |
|
242 | 242 | cassette = pycompat.fsdecode(kwargs.pop('test_vcr')) |
|
243 | 243 | import hgdemandimport |
|
244 | 244 | |
|
245 | 245 | with hgdemandimport.deactivated(): |
|
246 | 246 | import vcr as vcrmod |
|
247 | 247 | import vcr.stubs as stubs |
|
248 | 248 | |
|
249 | 249 | vcr = vcrmod.VCR( |
|
250 | 250 | serializer='json', |
|
251 | 251 | before_record_request=sanitiserequest, |
|
252 | 252 | before_record_response=sanitiseresponse, |
|
253 | 253 | custom_patches=[ |
|
254 | 254 | ( |
|
255 | 255 | urlmod, |
|
256 | 256 | 'httpconnection', |
|
257 | 257 | stubs.VCRHTTPConnection, |
|
258 | 258 | ), |
|
259 | 259 | ( |
|
260 | 260 | urlmod, |
|
261 | 261 | 'httpsconnection', |
|
262 | 262 | stubs.VCRHTTPSConnection, |
|
263 | 263 | ), |
|
264 | 264 | ], |
|
265 | 265 | ) |
|
266 | 266 | vcr.register_matcher('hgmatcher', hgmatcher) |
|
267 | 267 | with vcr.use_cassette(cassette, match_on=['hgmatcher']): |
|
268 | 268 | return fn(*args, **kwargs) |
|
269 | 269 | return fn(*args, **kwargs) |
|
270 | 270 | |
|
271 | 271 | cmd = util.checksignature(inner, depth=2) |
|
272 | 272 | cmd.__name__ = fn.__name__ |
|
273 | 273 | cmd.__doc__ = fn.__doc__ |
|
274 | 274 | |
|
275 | 275 | return command( |
|
276 | 276 | name, |
|
277 | 277 | fullflags, |
|
278 | 278 | spec, |
|
279 | 279 | helpcategory=helpcategory, |
|
280 | 280 | optionalrepo=optionalrepo, |
|
281 | 281 | )(cmd) |
|
282 | 282 | |
|
283 | 283 | return decorate |
|
284 | 284 | |
|
285 | 285 | |
|
286 | 286 | def _debug(ui, *msg, **opts): |
|
287 | 287 | """write debug output for Phabricator if ``phabricator.debug`` is set |
|
288 | 288 | |
|
289 | 289 | Specifically, this avoids dumping Conduit and HTTP auth chatter that is |
|
290 | 290 | printed with the --debug argument. |
|
291 | 291 | """ |
|
292 | 292 | if ui.configbool(b"phabricator", b"debug"): |
|
293 | 293 | flag = ui.debugflag |
|
294 | 294 | try: |
|
295 | 295 | ui.debugflag = True |
|
296 | 296 | ui.write(*msg, **opts) |
|
297 | 297 | finally: |
|
298 | 298 | ui.debugflag = flag |
|
299 | 299 | |
|
300 | 300 | |
|
301 | 301 | def urlencodenested(params): |
|
302 | 302 | """like urlencode, but works with nested parameters. |
|
303 | 303 | |
|
304 | 304 | For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be |
|
305 | 305 | flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to |
|
306 | 306 | urlencode. Note: the encoding is consistent with PHP's http_build_query. |
|
307 | 307 | """ |
|
308 | 308 | flatparams = util.sortdict() |
|
309 | 309 | |
|
310 | 310 | def process(prefix, obj): |
|
311 | 311 | if isinstance(obj, bool): |
|
312 | 312 | obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form |
|
313 | 313 | lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)] |
|
314 | 314 | items = {list: lister, dict: lambda x: x.items()}.get(type(obj)) |
|
315 | 315 | if items is None: |
|
316 | 316 | flatparams[prefix] = obj |
|
317 | 317 | else: |
|
318 | 318 | for k, v in items(obj): |
|
319 | 319 | if prefix: |
|
320 | 320 | process(b'%s[%s]' % (prefix, k), v) |
|
321 | 321 | else: |
|
322 | 322 | process(k, v) |
|
323 | 323 | |
|
324 | 324 | process(b'', params) |
|
325 | 325 | return util.urlreq.urlencode(flatparams) |
|
326 | 326 | |
|
327 | 327 | |
|
328 | 328 | def readurltoken(ui): |
|
329 | 329 | """return conduit url, token and make sure they exist |
|
330 | 330 | |
|
331 | 331 | Currently read from [auth] config section. In the future, it might |
|
332 | 332 | make sense to read from .arcconfig and .arcrc as well. |
|
333 | 333 | """ |
|
334 | 334 | url = ui.config(b'phabricator', b'url') |
|
335 | 335 | if not url: |
|
336 | 336 | raise error.Abort( |
|
337 | 337 | _(b'config %s.%s is required') % (b'phabricator', b'url') |
|
338 | 338 | ) |
|
339 | 339 | |
|
340 | 340 | res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user) |
|
341 | 341 | token = None |
|
342 | 342 | |
|
343 | 343 | if res: |
|
344 | 344 | group, auth = res |
|
345 | 345 | |
|
346 | 346 | ui.debug(b"using auth.%s.* for authentication\n" % group) |
|
347 | 347 | |
|
348 | 348 | token = auth.get(b'phabtoken') |
|
349 | 349 | |
|
350 | 350 | if not token: |
|
351 | 351 | raise error.Abort( |
|
352 | 352 | _(b'Can\'t find conduit token associated to %s') % (url,) |
|
353 | 353 | ) |
|
354 | 354 | |
|
355 | 355 | return url, token |
|
356 | 356 | |
|
357 | 357 | |
|
358 | 358 | def callconduit(ui, name, params): |
|
359 | 359 | """call Conduit API, params is a dict. return json.loads result, or None""" |
|
360 | 360 | host, token = readurltoken(ui) |
|
361 | 361 | url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo() |
|
362 | 362 | ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params))) |
|
363 | 363 | params = params.copy() |
|
364 | 364 | params[b'__conduit__'] = { |
|
365 | 365 | b'token': token, |
|
366 | 366 | } |
|
367 | 367 | rawdata = { |
|
368 | 368 | b'params': templatefilters.json(params), |
|
369 | 369 | b'output': b'json', |
|
370 | 370 | b'__conduit__': 1, |
|
371 | 371 | } |
|
372 | 372 | data = urlencodenested(rawdata) |
|
373 | 373 | curlcmd = ui.config(b'phabricator', b'curlcmd') |
|
374 | 374 | if curlcmd: |
|
375 | 375 | sin, sout = procutil.popen2( |
|
376 | 376 | b'%s -d @- %s' % (curlcmd, procutil.shellquote(url)) |
|
377 | 377 | ) |
|
378 | 378 | sin.write(data) |
|
379 | 379 | sin.close() |
|
380 | 380 | body = sout.read() |
|
381 | 381 | else: |
|
382 | 382 | urlopener = urlmod.opener(ui, authinfo) |
|
383 | 383 | request = util.urlreq.request(pycompat.strurl(url), data=data) |
|
384 | 384 | with contextlib.closing(urlopener.open(request)) as rsp: |
|
385 | 385 | body = rsp.read() |
|
386 | 386 | ui.debug(b'Conduit Response: %s\n' % body) |
|
387 | 387 | parsed = pycompat.rapply( |
|
388 | 388 | lambda x: encoding.unitolocal(x) |
|
389 | 389 | if isinstance(x, pycompat.unicode) |
|
390 | 390 | else x, |
|
391 | 391 | # json.loads only accepts bytes from py3.6+ |
|
392 | 392 | pycompat.json_loads(encoding.unifromlocal(body)), |
|
393 | 393 | ) |
|
394 | 394 | if parsed.get(b'error_code'): |
|
395 | 395 | msg = _(b'Conduit Error (%s): %s') % ( |
|
396 | 396 | parsed[b'error_code'], |
|
397 | 397 | parsed[b'error_info'], |
|
398 | 398 | ) |
|
399 | 399 | raise error.Abort(msg) |
|
400 | 400 | return parsed[b'result'] |
|
401 | 401 | |
|
402 | 402 | |
|
403 | 403 | @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True) |
|
404 | 404 | def debugcallconduit(ui, repo, name): |
|
405 | 405 | """call Conduit API |
|
406 | 406 | |
|
407 | 407 | Call parameters are read from stdin as a JSON blob. Result will be written |
|
408 | 408 | to stdout as a JSON blob. |
|
409 | 409 | """ |
|
410 | 410 | # json.loads only accepts bytes from 3.6+ |
|
411 | 411 | rawparams = encoding.unifromlocal(ui.fin.read()) |
|
412 | 412 | # json.loads only returns unicode strings |
|
413 | 413 | params = pycompat.rapply( |
|
414 | 414 | lambda x: encoding.unitolocal(x) |
|
415 | 415 | if isinstance(x, pycompat.unicode) |
|
416 | 416 | else x, |
|
417 | 417 | pycompat.json_loads(rawparams), |
|
418 | 418 | ) |
|
419 | 419 | # json.dumps only accepts unicode strings |
|
420 | 420 | result = pycompat.rapply( |
|
421 | 421 | lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x, |
|
422 | 422 | callconduit(ui, name, params), |
|
423 | 423 | ) |
|
424 | 424 | s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': ')) |
|
425 | 425 | ui.write(b'%s\n' % encoding.unitolocal(s)) |
|
426 | 426 | |
|
427 | 427 | |
|
428 | 428 | def getrepophid(repo): |
|
429 | 429 | """given callsign, return repository PHID or None""" |
|
430 | 430 | # developer config: phabricator.repophid |
|
431 | 431 | repophid = repo.ui.config(b'phabricator', b'repophid') |
|
432 | 432 | if repophid: |
|
433 | 433 | return repophid |
|
434 | 434 | callsign = repo.ui.config(b'phabricator', b'callsign') |
|
435 | 435 | if not callsign: |
|
436 | 436 | return None |
|
437 | 437 | query = callconduit( |
|
438 | 438 | repo.ui, |
|
439 | 439 | b'diffusion.repository.search', |
|
440 | 440 | {b'constraints': {b'callsigns': [callsign]}}, |
|
441 | 441 | ) |
|
442 | 442 | if len(query[b'data']) == 0: |
|
443 | 443 | return None |
|
444 | 444 | repophid = query[b'data'][0][b'phid'] |
|
445 | 445 | repo.ui.setconfig(b'phabricator', b'repophid', repophid) |
|
446 | 446 | return repophid |
|
447 | 447 | |
|
448 | 448 | |
|
449 | 449 | _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') |
|
450 | 450 | _differentialrevisiondescre = re.compile( |
|
451 | 451 | br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M |
|
452 | 452 | ) |
|
453 | 453 | |
|
454 | 454 | |
|
455 | 455 | def getoldnodedrevmap(repo, nodelist): |
|
456 | 456 | """find previous nodes that has been sent to Phabricator |
|
457 | 457 | |
|
458 | 458 | return {node: (oldnode, Differential diff, Differential Revision ID)} |
|
459 | 459 | for node in nodelist with known previous sent versions, or associated |
|
460 | 460 | Differential Revision IDs. ``oldnode`` and ``Differential diff`` could |
|
461 | 461 | be ``None``. |
|
462 | 462 | |
|
463 | 463 | Examines commit messages like "Differential Revision:" to get the |
|
464 | 464 | association information. |
|
465 | 465 | |
|
466 | 466 | If such commit message line is not found, examines all precursors and their |
|
467 | 467 | tags. Tags with format like "D1234" are considered a match and the node |
|
468 | 468 | with that tag, and the number after "D" (ex. 1234) will be returned. |
|
469 | 469 | |
|
470 | 470 | The ``old node``, if not None, is guaranteed to be the last diff of |
|
471 | 471 | corresponding Differential Revision, and exist in the repo. |
|
472 | 472 | """ |
|
473 | 473 | unfi = repo.unfiltered() |
|
474 | 474 | has_node = unfi.changelog.index.has_node |
|
475 | 475 | |
|
476 | 476 | result = {} # {node: (oldnode?, lastdiff?, drev)} |
|
477 | 477 | # ordered for test stability when printing new -> old mapping below |
|
478 | 478 | toconfirm = util.sortdict() # {node: (force, {precnode}, drev)} |
|
479 | 479 | for node in nodelist: |
|
480 | 480 | ctx = unfi[node] |
|
481 | 481 | # For tags like "D123", put them into "toconfirm" to verify later |
|
482 | 482 | precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node])) |
|
483 | 483 | for n in precnodes: |
|
484 | 484 | if has_node(n): |
|
485 | 485 | for tag in unfi.nodetags(n): |
|
486 | 486 | m = _differentialrevisiontagre.match(tag) |
|
487 | 487 | if m: |
|
488 | 488 | toconfirm[node] = (0, set(precnodes), int(m.group(1))) |
|
489 | 489 | break |
|
490 | 490 | else: |
|
491 | 491 | continue # move to next predecessor |
|
492 | 492 | break # found a tag, stop |
|
493 | 493 | else: |
|
494 | 494 | # Check commit message |
|
495 | 495 | m = _differentialrevisiondescre.search(ctx.description()) |
|
496 | 496 | if m: |
|
497 | 497 | toconfirm[node] = (1, set(precnodes), int(m.group('id'))) |
|
498 | 498 | |
|
499 | 499 | # Double check if tags are genuine by collecting all old nodes from |
|
500 | 500 | # Phabricator, and expect precursors overlap with it. |
|
501 | 501 | if toconfirm: |
|
502 | 502 | drevs = [drev for force, precs, drev in toconfirm.values()] |
|
503 | 503 | alldiffs = callconduit( |
|
504 | 504 | unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs} |
|
505 | 505 | ) |
|
506 | 506 | |
|
507 | 507 | def getnodes(d, precset): |
|
508 | 508 | # Ignore other nodes that were combined into the Differential |
|
509 | 509 | # that aren't predecessors of the current local node. |
|
510 | 510 | return [n for n in getlocalcommits(d) if n in precset] |
|
511 | 511 | |
|
512 | 512 | for newnode, (force, precset, drev) in toconfirm.items(): |
|
513 | 513 | diffs = [ |
|
514 | 514 | d for d in alldiffs.values() if int(d[b'revisionID']) == drev |
|
515 | 515 | ] |
|
516 | 516 | |
|
517 | 517 | # local predecessors known by Phabricator |
|
518 | 518 | phprecset = {n for d in diffs for n in getnodes(d, precset)} |
|
519 | 519 | |
|
520 | 520 | # Ignore if precursors (Phabricator and local repo) do not overlap, |
|
521 | 521 | # and force is not set (when commit message says nothing) |
|
522 | 522 | if not force and not phprecset: |
|
523 | 523 | tagname = b'D%d' % drev |
|
524 | 524 | tags.tag( |
|
525 | 525 | repo, |
|
526 | 526 | tagname, |
|
527 | 527 | nullid, |
|
528 | 528 | message=None, |
|
529 | 529 | user=None, |
|
530 | 530 | date=None, |
|
531 | 531 | local=True, |
|
532 | 532 | ) |
|
533 | 533 | unfi.ui.warn( |
|
534 | 534 | _( |
|
535 | 535 | b'D%d: local tag removed - does not match ' |
|
536 | 536 | b'Differential history\n' |
|
537 | 537 | ) |
|
538 | 538 | % drev |
|
539 | 539 | ) |
|
540 | 540 | continue |
|
541 | 541 | |
|
542 | 542 | # Find the last node using Phabricator metadata, and make sure it |
|
543 | 543 | # exists in the repo |
|
544 | 544 | oldnode = lastdiff = None |
|
545 | 545 | if diffs: |
|
546 | 546 | lastdiff = max(diffs, key=lambda d: int(d[b'id'])) |
|
547 | 547 | oldnodes = getnodes(lastdiff, precset) |
|
548 | 548 | |
|
549 | 549 | _debug( |
|
550 | 550 | unfi.ui, |
|
551 | 551 | b"%s mapped to old nodes %s\n" |
|
552 | 552 | % ( |
|
553 | 553 | short(newnode), |
|
554 | 554 | stringutil.pprint([short(n) for n in sorted(oldnodes)]), |
|
555 | 555 | ), |
|
556 | 556 | ) |
|
557 | 557 | |
|
558 | 558 | # If this commit was the result of `hg fold` after submission, |
|
559 | 559 | # and now resubmitted with --fold, the easiest thing to do is |
|
560 | 560 | # to leave the node clear. This only results in creating a new |
|
561 | 561 | # diff for the _same_ Differential Revision if this commit is |
|
562 | 562 | # the first or last in the selected range. If we picked a node |
|
563 | 563 | # from the list instead, it would have to be the lowest if at |
|
564 | 564 | # the beginning of the --fold range, or the highest at the end. |
|
565 | 565 | # Otherwise, one or more of the nodes wouldn't be considered in |
|
566 | 566 | # the diff, and the Differential wouldn't be properly updated. |
|
567 | 567 | # If this commit is the result of `hg split` in the same |
|
568 | 568 | # scenario, there is a single oldnode here (and multiple |
|
569 | 569 | # newnodes mapped to it). That makes it the same as the normal |
|
570 | 570 | # case, as the edges of the newnode range cleanly maps to one |
|
571 | 571 | # oldnode each. |
|
572 | 572 | if len(oldnodes) == 1: |
|
573 | 573 | oldnode = oldnodes[0] |
|
574 | 574 | if oldnode and not has_node(oldnode): |
|
575 | 575 | oldnode = None |
|
576 | 576 | |
|
577 | 577 | result[newnode] = (oldnode, lastdiff, drev) |
|
578 | 578 | |
|
579 | 579 | return result |
|
580 | 580 | |
|
581 | 581 | |
|
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.

    The association is looked up first in the commit message (via the
    module-level ``Differential Revision`` regex) and, failing that, in the
    local tags attached to the node.
    """
    drevids = {}
    for rev in revs:
        changectx = repo[rev]
        drevid = None
        # Prefer the commit-message footer over local tags.
        descmatch = _differentialrevisiondescre.search(changectx.description())
        if descmatch:
            drevid = int(descmatch.group('id'))
        else:
            # Fall back to tags attached to this node.
            for tag in repo.nodetags(changectx.node()):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    drevid = int(tagmatch.group(1))
                    break
        drevids[rev] = drevid

    return drevids
|
603 | 603 | |
|
604 | 604 | |
|
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    # Diff from the parent of the base commit to the tip commit, discarding
    # the ui labels that diffui() yields alongside each chunk.
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    for piece, _label in chunks:
        buf.write(piece)
    return buf.getvalue()
|
613 | 613 | |
|
614 | 614 | |
|
class DiffChangeType(object):
    """Numeric codes describing the kind of change a file underwent.

    The values are sent to Phabricator as part of a change's ``type`` field;
    do not renumber them.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
|
624 | 624 | |
|
625 | 625 | |
|
class DiffFileType(object):
    """Numeric codes describing a file's content kind in a Differential diff.

    The values are sent to Phabricator as a change's ``fileType``; do not
    renumber them.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
|
630 | 630 | |
|
631 | 631 | |
|
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    NOTE: the attribute declaration order is part of the interface — callers
    (e.g. ``maketext``) construct instances positionally.
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
|
645 | 645 | |
|
646 | 646 | |
|
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff.  Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """duplicate every ``new:``-prefixed metadata entry under the
        corresponding ``old:`` key (keys without the prefix are copied onto
        themselves, which is a no-op)"""
        # list() because we mutate the dict while iterating over its keys
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """record the previous unix file mode (e.g. b'100644')"""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """record the new unix file mode (e.g. b'100755')"""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """append a phabhunk, folding its line counts into this change's
        totals"""
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
|
686 | 686 | |
|
687 | 687 | |
|
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes.  Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """register a phabchange for this diff, keyed by its current path

        A later change for the same path silently replaces the earlier one.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
|
714 | 714 | |
|
715 | 715 | |
|
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file

    Computes the hunks for ``fname`` between ``basectx.p1()`` and ``ctx`` and
    adds each one to ``pchange`` (which also accumulates add/del line counts).
    """
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # context=32767 effectively requests full-file context in the hunks —
    # presumably so Phabricator can render surrounding lines; confirm against
    # the Differential rendering behavior if changing this.
    diffopts = mdiff.diffopts(git=True, context=32767)
    # Only one file is matched, so a single (header, hunks) entry is expected.
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # lines[0] is the @@ hunk header; the corpus is the hunk body only
        corpus = b''.join(lines[1:])
        # Re-run diffstat over header+hunk to get the added/deleted counts
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        # phabhunk is constructed positionally; argument order must match the
        # attr.ib declaration order on the class.
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
|
744 | 744 | |
|
745 | 745 | |
|
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks

    ``fphid`` is the file PHID returned by ``file.allocate``; the chunk
    boundaries come from the server via ``file.querychunks``.
    """
    ui = fctx.repo().ui
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            # Skip chunks the server already has (resumed uploads)
            if chunk[b'complete']:
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
|
771 | 771 | |
|
772 | 772 | |
|
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the (possibly pre-existing) file on the server.
    Raises ``error.Abort`` if no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # No PHID from allocate means a single-shot upload is expected
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # A PHID plus upload=True means the server wants chunked upload
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
|
808 | 808 | |
|
809 | 809 | |
|
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        # Contents are identical, so reuse the new-side metadata for the old
        pchange.copynewmetadatatoold()
|
833 | 833 | |
|
834 | 834 | |
|
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    # Assume plain binary until the guessed mime type says it is an image.
    pchange.fileType = DiffFileType.BINARY
    pchange.metadata[b'new:binary-phid'] = uploadfile(fctx)
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimetype, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimetype:
        mimetype = pycompat.bytestr(mimetype)
        pchange.metadata[b'new:file:mime-type'] = mimetype
        if mimetype.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
|
847 | 847 | |
|
848 | 848 | |
|
# Copied from mercurial/patch.py
# Maps a Mercurial file flag to its git file-mode string:
# 'l' = symlink, 'x' = executable, '' = regular file.
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
|
851 | 851 | |
|
852 | 852 | |
|
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
    except UnicodeDecodeError:
        # Warn the user so the "binary" classification isn't a surprise.
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
|
866 | 866 | |
|
867 | 867 | |
|
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # Binary or non-UTF-8 removals carry no text hunks.
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
|
880 | 880 | |
|
881 | 881 | |
|
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # Only record modes when they changed (e.g. +x added or removed)
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # Either side being binary (or undecodable as UTF-8) forces the whole
        # change to be handled as binary content
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
|
906 | 906 | |
|
907 | 907 | |
|
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — a rename's source is removed from it
    so that ``addremoved`` later won't double-report it.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # Determine where this file came from: the pathcopies map for a
        # multi-commit range, or the context's own rename info otherwise
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # Source disappeared: this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # A move source that is also copied elsewhere becomes MULTICOPY
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        # Either side being binary/non-UTF-8 forces binary handling
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # The "away" sides of copies/moves are emitted after all adds are known,
    # so MULTICOPY promotion has already happened
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
|
989 | 989 | |
|
990 | 990 | |
|
def creatediff(basectx, ctx):
    """create a Differential Diff

    Builds a phabdiff covering ``basectx.p1()::ctx`` and posts it via the
    ``differential.creatediff`` conduit call.  Returns the server's diff
    object; raises ``error.Abort`` on an empty response.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
|
1020 | 1020 | |
|
1021 | 1021 | |
|
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.

    Sets two properties on the server-side diff: ``hg:meta`` (tip commit
    metadata) and ``local:commits`` (per-commit metadata for every context).
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
|
1065 | 1065 | |
|
1066 | 1066 | |
|
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` pair from the server.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # Compare full-context diffs of old vs new range to decide whether a
        # new server-side diff is needed at all
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
    writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
|
1185 | 1185 | |
|
1186 | 1186 | |
|
def userphids(ui, names):
    """convert user names to PHIDs"""
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    entries = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in entries}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in entries]
|
1202 | 1202 | |
|
1203 | 1203 | |
|
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    actionlabels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(
        actionlabels[action], b'phabricator.action.%s' % action
    )
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
    # First line of the commit message serves as the short description
    desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
|
1222 | 1222 | |
|
1223 | 1223 | |
|
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    _debug(
        unfi.ui,
        b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
    )

    try:
        writediffproperties([unfi[newnode] for newnode in newnodes], diff)
    except util.urlerr.urlerror:
        # If it fails just warn and keep going, otherwise the DREV
        # associations will be lost
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
|
1241 | 1241 | |
|
1242 | 1242 | |
|
1243 | 1243 | @vcrcommand( |
|
1244 | 1244 | b'phabsend', |
|
1245 | 1245 | [ |
|
1246 | 1246 | (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), |
|
1247 | 1247 | (b'', b'amend', True, _(b'update commit messages')), |
|
1248 | 1248 | (b'', b'reviewer', [], _(b'specify reviewers')), |
|
1249 | 1249 | (b'', b'blocker', [], _(b'specify blocking reviewers')), |
|
1250 | 1250 | ( |
|
1251 | 1251 | b'm', |
|
1252 | 1252 | b'comment', |
|
1253 | 1253 | b'', |
|
1254 | 1254 | _(b'add a comment to Revisions with new/updated Diffs'), |
|
1255 | 1255 | ), |
|
1256 | 1256 | (b'', b'confirm', None, _(b'ask for confirmation before sending')), |
|
1257 | 1257 | (b'', b'fold', False, _(b'combine the revisions into one review')), |
|
1258 | 1258 | ], |
|
1259 | 1259 | _(b'REV [OPTIONS]'), |
|
1260 | 1260 | helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
1261 | 1261 | ) |
|
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

      [phabsend]
      confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    # Posting an obsolete commit would create markers pointing at rewritten
    # history; refuse early.
    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range. The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, skiprev, revtype):
        # `skiprev` is the one legitimate head/root; everything else is listed
        # in the error message.
        badnodes = [repo[r].node() for r in revs if r != skiprev]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, heads.max(), b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, roots.min(), b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option. (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.) It should
        # lock the repo and ensure that the range is editable, but that would
        # make the code pretty convoluted. The default behavior of `arc` is to
        # create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value,
        # and iterate over those groups when posting, and then again when
        # amending. But for simplicity, require all selected revisions to be
        # for the same DREV (if present). Adding local revisions to an
        # existing DREV is acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        # Blocking reviewers use the "blocking(PHID)" wrapper understood by
        # the differential.revision.edit "reviewers.add" transaction.
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            # When folding, the "base" association comes from the last (top)
            # commit of the selected range.
            # NOTE(review): oldbasediff/oldbaserevid appear unused after this
            # point -- confirm before relying on them.
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            # With --fold there is a single diff shared by every commit.
            if fold:
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # Print the status for every selected commit against the single
            # folded review, then stop -- the whole stack was posted at once.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits. The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contain "Differential Revision".
                # Also rewrite when a parent was rewritten, so the child is
                # re-parented onto the new node.
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected. No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            # NOTE: `old`, `olddiff` and `drevid` below intentionally carry
            # the values from the last iteration of the loops above; with
            # --fold they all refer to the single shared review.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs. So use the queried diff (if
                # present), or force the amend (a new revision is being
                # posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process. The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=old.description(),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
|
1616 | 1616 | |
|
1617 | 1617 | |
|
# Map from "hg:meta" keys to header understood by "hg import". The order is
# consistent with "hg export" output (see ``readpatch()`` for the emitter).
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        # NOTE(review): trailing space looks deliberate (header padding as in
        # "hg export" output) -- confirm before "fixing" it.
        (b'parent', b'Parent '),
    ]
)
|
1629 | 1629 | |
|
1630 | 1630 | |
|
def _confirmbeforesend(repo, revs, oldmap):
    """show a one-line summary of every changeset and prompt before sending

    ``oldmap`` maps node -> (oldnode, olddiff, drevid); a drevid means the
    changeset updates an existing review, otherwise it is labelled NEW.
    Returns True when the user confirms, False otherwise.
    """
    ui = repo.ui
    url, token = readurltoken(ui)

    for rev in revs:
        ctx = repo[rev]
        firstline = ctx.description().splitlines()[0]
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(firstline, b'phabricator.desc'),
            )
        )

    # promptchoice() returns 0 for the first choice (Yes).
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
|
1658 | 1658 | |
|
1659 | 1659 | |
|
# Normalized Differential Revision status names (the output format of
# ``_getstatusname()``) accepted as bare symbols by the DREVSPEC query
# language in ``querydrev()``.
_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}
|
1668 | 1668 | |
|
1669 | 1669 | |
|
1670 | 1670 | def _getstatusname(drev): |
|
1671 | 1671 | """get normalized status name from a Differential Revision""" |
|
1672 | 1672 | return drev[b'statusName'].replace(b' ', b'').lower() |
|
1673 | 1673 | |
|
1674 | 1674 | |
|
# Small language to specify differential revisions. Support symbols: (), :X,
# +, - and &.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
|
1689 | 1689 | |
|
1690 | 1690 | |
|
1691 | 1691 | def _tokenize(text): |
|
1692 | 1692 | view = memoryview(text) # zero-copy slice |
|
1693 | 1693 | special = b'():+-& ' |
|
1694 | 1694 | pos = 0 |
|
1695 | 1695 | length = len(text) |
|
1696 | 1696 | while pos < length: |
|
1697 | 1697 | symbol = b''.join( |
|
1698 | 1698 | itertools.takewhile( |
|
1699 | 1699 | lambda ch: ch not in special, pycompat.iterbytestr(view[pos:]) |
|
1700 | 1700 | ) |
|
1701 | 1701 | ) |
|
1702 | 1702 | if symbol: |
|
1703 | 1703 | yield (b'symbol', symbol, pos) |
|
1704 | 1704 | pos += len(symbol) |
|
1705 | 1705 | else: # special char, ignore space |
|
1706 | 1706 | if text[pos : pos + 1] != b' ': |
|
1707 | 1707 | yield (text[pos : pos + 1], None, pos) |
|
1708 | 1708 | pos += 1 |
|
1709 | 1709 | yield (b'end', None, pos) |
|
1710 | 1710 | |
|
1711 | 1711 | |
|
def _parse(text):
    """parse a DREVSPEC bytestring into a tree using ``_elements``

    Raises ParseError when the parser stops before consuming all of text.
    """
    p = parser.parser(_elements)
    tree, consumed = p.parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
|
1717 | 1717 | |
|
1718 | 1718 | |
|
1719 | 1719 | def _parsedrev(symbol): |
|
1720 | 1720 | """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None""" |
|
1721 | 1721 | if symbol.startswith(b'D') and symbol[1:].isdigit(): |
|
1722 | 1722 | return int(symbol[1:]) |
|
1723 | 1723 | if symbol.isdigit(): |
|
1724 | 1724 | return int(symbol) |
|
1725 | 1725 | |
|
1726 | 1726 | |
|
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch

    Walks a tree produced by ``_parse()``.  Ids under an ``ancestors`` (``:``)
    operator are reported in both sets.
    """
    op = tree[0]
    if op == b'symbol':
        drev = _parsedrev(tree[1])
        return ({drev} if drev else set()), set()
    if op == b'ancestors':
        # Everything below b':' is also an ancestor-query root.
        inner, innerancestors = _prefetchdrevs(tree[1])
        return set(inner), inner | innerancestors
    drevs = set()
    ancestordrevs = set()
    for subtree in tree[1:]:
        inner, innerancestors = _prefetchdrevs(subtree)
        drevs |= inner
        ancestordrevs |= innerancestors
    return drevs, ancestordrevs
|
1747 | 1747 | |
|
1748 | 1748 | |
|
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ],
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null,
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    # Both closures below read and populate `prefetched` (defined further
    # down); they are only called after it is initialized.
    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result, keyed by both phid and integer id
        # so later lookups work with either.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]

        Follows "phabricator:depends-on" links breadth-first, deduplicating
        with `visited`, then reverses so the result is bottom-to-top.
        """
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch: for each ancestor query,
    # speculatively fetch up to `batchsize` ids below it in one round trip.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status symbols filter the prefetched/validated id set.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # smartset supports and_/add/sub via the operator module.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
|
1874 | 1874 | |
|
1875 | 1875 | |
|
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.  Empty fields
    are dropped; the remaining ones are joined by blank lines.
    """
    parts = [drev[b'title'], drev[b'summary'].rstrip()]

    plan = drev[b'testPlan'].rstrip()
    if plan:
        parts.append(b'Test Plan:\n%s' % plan)

    parts.append(b'Differential Revision: %s' % drev[b'uri'])
    return b'\n\n'.join(p for p in parts if p)
|
1889 | 1889 | |
|
1890 | 1890 | |
|
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made
    for each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    desc = ctx.description()
    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    if _differentialrevisiondescre.search(desc):
        return _differentialrevisiondescre.sub(uri, desc)
    return b'\n\n'.join([desc, uri])
|
1916 | 1916 | |
|
1917 | 1917 | |
|
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}

    # Multiple entries only appear when several commits share one diff;
    # their keys are the hex node ids.
    if len(localcommits) > 1:
        return {bin(node) for node in localcommits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.  An absent node yields {None}.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
|
1931 | 1931 | |
|
1932 | 1932 | |
|
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com",
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        meta = {}
        localcommits = props.get(b'local:commits')
        if localcommits:
            # Pick one commit entry to read the metadata from.
            commit = sorted(localcommits.values())[0]
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                # "local:commits" carries a bare epoch; assume UTC offset 0.
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]

    # Backfill anything still missing from the top-level diff fields.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
|
2000 | 2000 | |
|
2001 | 2001 | |
|
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.  When ``stack``
    is true, each spec is expanded to include its ancestors (b':').  Aborts
    when the specs are empty or match nothing.
    """
    if specs:
        formatted = []
        for spec in specs:
            if stack:
                spec = b':(%s)' % spec
            formatted.append(b'(%s)' % spec)

        drevs = querydrev(ui, b'+'.join(formatted))
        if drevs:
            return drevs

    raise error.Abort(_(b"empty DREVSPEC set"))
|
2021 | 2021 | |
|
2022 | 2022 | |
|
2023 | 2023 | def readpatch(ui, drevs, write): |
|
2024 | 2024 | """generate plain-text patch readable by 'hg import' |
|
2025 | 2025 | |
|
2026 | 2026 | write takes a list of (DREV, bytes), where DREV is the differential number |
|
2027 | 2027 | (as bytes, without the "D" prefix) and the bytes are the text of a patch |
|
2028 | 2028 | to be imported. drevs is what "querydrev" returns, results of |
|
2029 | 2029 | "differential.query". |
|
2030 | 2030 | """ |
|
2031 | 2031 | # Prefetch hg:meta property for all diffs |
|
2032 | 2032 | diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs}) |
|
2033 | 2033 | diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids}) |
|
2034 | 2034 | |
|
2035 | 2035 | patches = [] |
|
2036 | 2036 | |
|
2037 | 2037 | # Generate patch for each drev |
|
2038 | 2038 | for drev in drevs: |
|
2039 | 2039 | ui.note(_(b'reading D%s\n') % drev[b'id']) |
|
2040 | 2040 | |
|
2041 | 2041 | diffid = max(int(v) for v in drev[b'diffs']) |
|
2042 | 2042 | body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid}) |
|
2043 | 2043 | desc = getdescfromdrev(drev) |
|
2044 | 2044 | header = b'# HG changeset patch\n' |
|
2045 | 2045 | |
|
2046 | 2046 | # Try to preserve metadata from hg:meta property. Write hg patch |
|
2047 | 2047 | # headers that can be read by the "import" command. See patchheadermap |
|
2048 | 2048 | # and extract in mercurial/patch.py for supported headers. |
|
2049 | 2049 | meta = getdiffmeta(diffs[b'%d' % diffid]) |
|
2050 | 2050 | for k in _metanamemap.keys(): |
|
2051 | 2051 | if k in meta: |
|
2052 | 2052 | header += b'# %s %s\n' % (_metanamemap[k], meta[k]) |
|
2053 | 2053 | |
|
2054 | 2054 | content = b'%s%s\n%s' % (header, desc, body) |
|
2055 | 2055 | patches.append((drev[b'id'], content)) |
|
2056 | 2056 | |
|
2057 | 2057 | # Write patches to the supplied callback |
|
2058 | 2058 | write(patches) |
|
2059 | 2059 | |
|
2060 | 2060 | |
|
2061 | 2061 | @vcrcommand( |
|
2062 | 2062 | b'phabread', |
|
2063 | 2063 | [(b'', b'stack', False, _(b'read dependencies'))], |
|
2064 | 2064 | _(b'DREVSPEC... [OPTIONS]'), |
|
2065 | 2065 | helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
2066 | 2066 | optionalrepo=True, |
|
2067 | 2067 | ) |
|
2068 | 2068 | def phabread(ui, repo, *specs, **opts): |
|
2069 | 2069 | """print patches from Phabricator suitable for importing |
|
2070 | 2070 | |
|
2071 | 2071 | DREVSPEC could be a Differential Revision identity, like ``D123``, or just |
|
2072 | 2072 | the number ``123``. It could also have common operators like ``+``, ``-``, |
|
2073 | 2073 | ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to |
|
2074 | 2074 | select a stack. If multiple DREVSPEC values are given, the result is the |
|
2075 | 2075 | union of each individually evaluated value. No attempt is currently made |
|
2076 | 2076 | to reorder the values to run from parent to child. |
|
2077 | 2077 | |
|
2078 | 2078 | ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision`` |
|
2079 | 2079 | could be used to filter patches by status. For performance reason, they |
|
2080 | 2080 | only represent a subset of non-status selections and cannot be used alone. |
|
2081 | 2081 | |
|
2082 | 2082 | For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude |
|
2083 | 2083 | D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a |
|
2084 | 2084 | stack up to D9. |
|
2085 | 2085 | |
|
2086 | 2086 | If --stack is given, follow dependencies information and read all patches. |
|
2087 | 2087 | It is equivalent to the ``:`` operator. |
|
2088 | 2088 | """ |
|
2089 | 2089 | opts = pycompat.byteskwargs(opts) |
|
2090 | 2090 | drevs = _getdrevs(ui, opts.get(b'stack'), specs) |
|
2091 | 2091 | |
|
2092 | 2092 | def _write(patches): |
|
2093 | 2093 | for drev, content in patches: |
|
2094 | 2094 | ui.write(content) |
|
2095 | 2095 | |
|
2096 | 2096 | readpatch(ui, drevs, _write) |
|
2097 | 2097 | |
|
2098 | 2098 | |
|
2099 | 2099 | @vcrcommand( |
|
2100 | 2100 | b'phabimport', |
|
2101 | 2101 | [(b'', b'stack', False, _(b'import dependencies as well'))], |
|
2102 | 2102 | _(b'DREVSPEC... [OPTIONS]'), |
|
2103 | 2103 | helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
2104 | 2104 | ) |
|
2105 | 2105 | def phabimport(ui, repo, *specs, **opts): |
|
2106 | 2106 | """import patches from Phabricator for the specified Differential Revisions |
|
2107 | 2107 | |
|
2108 | 2108 | The patches are read and applied starting at the parent of the working |
|
2109 | 2109 | directory. |
|
2110 | 2110 | |
|
2111 | 2111 | See ``hg help phabread`` for how to specify DREVSPEC. |
|
2112 | 2112 | """ |
|
2113 | 2113 | opts = pycompat.byteskwargs(opts) |
|
2114 | 2114 | |
|
2115 | 2115 | # --bypass avoids losing exec and symlink bits when importing on Windows, |
|
2116 | 2116 | # and allows importing with a dirty wdir. It also aborts instead of leaving |
|
2117 | 2117 | # rejects. |
|
2118 | 2118 | opts[b'bypass'] = True |
|
2119 | 2119 | |
|
2120 | 2120 | # Mandatory default values, synced with commands.import |
|
2121 | 2121 | opts[b'strip'] = 1 |
|
2122 | 2122 | opts[b'prefix'] = b'' |
|
2123 | 2123 | # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone() |
|
2124 | 2124 | opts[b'obsolete'] = False |
|
2125 | 2125 | |
|
2126 | 2126 | if ui.configbool(b'phabimport', b'secret'): |
|
2127 | 2127 | opts[b'secret'] = True |
|
2128 | 2128 | if ui.configbool(b'phabimport', b'obsolete'): |
|
2129 | 2129 | opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone() |
|
2130 | 2130 | |
|
2131 | 2131 | def _write(patches): |
|
2132 | 2132 | parents = repo[None].parents() |
|
2133 | 2133 | |
|
2134 | 2134 | with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'): |
|
2135 | 2135 | for drev, contents in patches: |
|
2136 | 2136 | ui.status(_(b'applying patch from D%s\n') % drev) |
|
2137 | 2137 | |
|
2138 | 2138 | with patch.extract(ui, pycompat.bytesio(contents)) as patchdata: |
|
2139 | 2139 | msg, node, rej = cmdutil.tryimportone( |
|
2140 | 2140 | ui, |
|
2141 | 2141 | repo, |
|
2142 | 2142 | patchdata, |
|
2143 | 2143 | parents, |
|
2144 | 2144 | opts, |
|
2145 | 2145 | [], |
|
2146 | 2146 | None, # Never update wdir to another revision |
|
2147 | 2147 | ) |
|
2148 | 2148 | |
|
2149 | 2149 | if not node: |
|
2150 | 2150 | raise error.Abort(_(b'D%s: no diffs found') % drev) |
|
2151 | 2151 | |
|
2152 | 2152 | ui.note(msg + b'\n') |
|
2153 | 2153 | parents = [repo[node]] |
|
2154 | 2154 | |
|
2155 | 2155 | drevs = _getdrevs(ui, opts.get(b'stack'), specs) |
|
2156 | 2156 | |
|
2157 | 2157 | readpatch(repo.ui, drevs, _write) |
|
2158 | 2158 | |
|
2159 | 2159 | |
|
2160 | 2160 | @vcrcommand( |
|
2161 | 2161 | b'phabupdate', |
|
2162 | 2162 | [ |
|
2163 | 2163 | (b'', b'accept', False, _(b'accept revisions')), |
|
2164 | 2164 | (b'', b'reject', False, _(b'reject revisions')), |
|
2165 | 2165 | (b'', b'abandon', False, _(b'abandon revisions')), |
|
2166 | 2166 | (b'', b'reclaim', False, _(b'reclaim revisions')), |
|
2167 | (b'', b'plan-changes', False, _(b'plan changes for revisions')), | |
|
2167 | 2168 | (b'm', b'comment', b'', _(b'comment on the last revision')), |
|
2168 | 2169 | ], |
|
2169 | 2170 | _(b'DREVSPEC... [OPTIONS]'), |
|
2170 | 2171 | helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
2171 | 2172 | optionalrepo=True, |
|
2172 | 2173 | ) |
|
2173 | 2174 | def phabupdate(ui, repo, *specs, **opts): |
|
2174 | 2175 | """update Differential Revision in batch |
|
2175 | 2176 | |
|
2176 | 2177 | DREVSPEC selects revisions. See :hg:`help phabread` for its usage. |
|
2177 | 2178 | """ |
|
2178 | 2179 | opts = pycompat.byteskwargs(opts) |
|
2179 | flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)] | |
|
2180 | transactions = [ | |
|
2181 | b'abandon', | |
|
2182 | b'accept', | |
|
2183 | b'plan-changes', | |
|
2184 | b'reclaim', | |
|
2185 | b'reject', | |
|
2186 | ] | |
|
2187 | flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))] | |
|
2180 | 2188 | if len(flags) > 1: |
|
2181 | 2189 | raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags)) |
|
2182 | 2190 | |
|
2183 | 2191 | actions = [] |
|
2184 | 2192 | for f in flags: |
|
2185 | 2193 | actions.append({b'type': f, b'value': True}) |
|
2186 | 2194 | |
|
2187 | 2195 | drevs = _getdrevs(ui, opts.get(b'stack'), specs) |
|
2188 | 2196 | for i, drev in enumerate(drevs): |
|
2189 | 2197 | if i + 1 == len(drevs) and opts.get(b'comment'): |
|
2190 | 2198 | actions.append({b'type': b'comment', b'value': opts[b'comment']}) |
|
2191 | 2199 | if actions: |
|
2192 | 2200 | params = { |
|
2193 | 2201 | b'objectIdentifier': drev[b'phid'], |
|
2194 | 2202 | b'transactions': actions, |
|
2195 | 2203 | } |
|
2196 | 2204 | callconduit(ui, b'differential.revision.edit', params) |
|
2197 | 2205 | |
|
2198 | 2206 | |
|
2199 | 2207 | @eh.templatekeyword(b'phabreview', requires={b'ctx'}) |
|
2200 | 2208 | def template_review(context, mapping): |
|
2201 | 2209 | """:phabreview: Object describing the review for this changeset. |
|
2202 | 2210 | Has attributes `url` and `id`. |
|
2203 | 2211 | """ |
|
2204 | 2212 | ctx = context.resource(mapping, b'ctx') |
|
2205 | 2213 | m = _differentialrevisiondescre.search(ctx.description()) |
|
2206 | 2214 | if m: |
|
2207 | 2215 | return templateutil.hybriddict( |
|
2208 | 2216 | {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),} |
|
2209 | 2217 | ) |
|
2210 | 2218 | else: |
|
2211 | 2219 | tags = ctx.repo().nodetags(ctx.node()) |
|
2212 | 2220 | for t in tags: |
|
2213 | 2221 | if _differentialrevisiontagre.match(t): |
|
2214 | 2222 | url = ctx.repo().ui.config(b'phabricator', b'url') |
|
2215 | 2223 | if not url.endswith(b'/'): |
|
2216 | 2224 | url += b'/' |
|
2217 | 2225 | url += t |
|
2218 | 2226 | |
|
2219 | 2227 | return templateutil.hybriddict({b'url': url, b'id': t,}) |
|
2220 | 2228 | return None |
|
2221 | 2229 | |
|
2222 | 2230 | |
|
2223 | 2231 | @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'}) |
|
2224 | 2232 | def template_status(context, mapping): |
|
2225 | 2233 | """:phabstatus: String. Status of Phabricator differential. |
|
2226 | 2234 | """ |
|
2227 | 2235 | ctx = context.resource(mapping, b'ctx') |
|
2228 | 2236 | repo = context.resource(mapping, b'repo') |
|
2229 | 2237 | ui = context.resource(mapping, b'ui') |
|
2230 | 2238 | |
|
2231 | 2239 | rev = ctx.rev() |
|
2232 | 2240 | try: |
|
2233 | 2241 | drevid = getdrevmap(repo, [rev])[rev] |
|
2234 | 2242 | except KeyError: |
|
2235 | 2243 | return None |
|
2236 | 2244 | drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]}) |
|
2237 | 2245 | for drev in drevs: |
|
2238 | 2246 | if int(drev[b'id']) == drevid: |
|
2239 | 2247 | return templateutil.hybriddict( |
|
2240 | 2248 | {b'url': drev[b'uri'], b'status': drev[b'statusName'],} |
|
2241 | 2249 | ) |
|
2242 | 2250 | return None |
|
2243 | 2251 | |
|
2244 | 2252 | |
|
2245 | 2253 | @show.showview(b'phabstatus', csettopic=b'work') |
|
2246 | 2254 | def phabstatusshowview(ui, repo, displayer): |
|
2247 | 2255 | """Phabricator differiential status""" |
|
2248 | 2256 | revs = repo.revs('sort(_underway(), topo)') |
|
2249 | 2257 | drevmap = getdrevmap(repo, revs) |
|
2250 | 2258 | unknownrevs, drevids, revsbydrevid = [], set(), {} |
|
2251 | 2259 | for rev, drevid in pycompat.iteritems(drevmap): |
|
2252 | 2260 | if drevid is not None: |
|
2253 | 2261 | drevids.add(drevid) |
|
2254 | 2262 | revsbydrevid.setdefault(drevid, set()).add(rev) |
|
2255 | 2263 | else: |
|
2256 | 2264 | unknownrevs.append(rev) |
|
2257 | 2265 | |
|
2258 | 2266 | drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)}) |
|
2259 | 2267 | drevsbyrev = {} |
|
2260 | 2268 | for drev in drevs: |
|
2261 | 2269 | for rev in revsbydrevid[int(drev[b'id'])]: |
|
2262 | 2270 | drevsbyrev[rev] = drev |
|
2263 | 2271 | |
|
2264 | 2272 | def phabstatus(ctx): |
|
2265 | 2273 | drev = drevsbyrev[ctx.rev()] |
|
2266 | 2274 | status = ui.label( |
|
2267 | 2275 | b'%(statusName)s' % drev, |
|
2268 | 2276 | b'phabricator.status.%s' % _getstatusname(drev), |
|
2269 | 2277 | ) |
|
2270 | 2278 | ui.write(b"\n%s %s\n" % (drev[b'uri'], status)) |
|
2271 | 2279 | |
|
2272 | 2280 | revs -= smartset.baseset(unknownrevs) |
|
2273 | 2281 | revdag = graphmod.dagwalker(repo, revs) |
|
2274 | 2282 | |
|
2275 | 2283 | ui.setconfig(b'experimental', b'graphshorten', True) |
|
2276 | 2284 | displayer._exthook = phabstatus |
|
2277 | 2285 | nodelen = show.longestshortest(repo, revs) |
|
2278 | 2286 | logcmdutil.displaygraph( |
|
2279 | 2287 | ui, |
|
2280 | 2288 | repo, |
|
2281 | 2289 | revdag, |
|
2282 | 2290 | displayer, |
|
2283 | 2291 | graphmod.asciiedges, |
|
2284 | 2292 | props={b'nodelen': nodelen}, |
|
2285 | 2293 | ) |
@@ -1,965 +1,969 b'' | |||
|
1 | 1 | #require vcr |
|
2 | 2 | $ cat >> $HGRCPATH <<EOF |
|
3 | 3 | > [extensions] |
|
4 | 4 | > phabricator = |
|
5 | 5 | > |
|
6 | 6 | > [auth] |
|
7 | 7 | > hgphab.schemes = https |
|
8 | 8 | > hgphab.prefix = phab.mercurial-scm.org |
|
9 | 9 | > # When working on the extension and making phabricator interaction |
|
10 | 10 | > # changes, edit this to be a real phabricator token. When done, edit |
|
11 | 11 | > # it back. The VCR transcripts will be auto-sanitised to replace your real |
|
12 | 12 | > # token with this value. |
|
13 | 13 | > hgphab.phabtoken = cli-hahayouwish |
|
14 | 14 | > |
|
15 | 15 | > [phabricator] |
|
16 | 16 | > debug = True |
|
17 | 17 | > EOF |
|
18 | 18 | $ hg init repo |
|
19 | 19 | $ cd repo |
|
20 | 20 | $ cat >> .hg/hgrc <<EOF |
|
21 | 21 | > [phabricator] |
|
22 | 22 | > url = https://phab.mercurial-scm.org/ |
|
23 | 23 | > callsign = HG |
|
24 | 24 | > EOF |
|
25 | 25 | $ VCR="$TESTDIR/phabricator" |
|
26 | 26 | |
|
27 | 27 | Error is handled reasonably. We override the phabtoken here so that |
|
28 | 28 | when you're developing changes to phabricator.py you can edit the |
|
29 | 29 | above config and have a real token in the test but not have to edit |
|
30 | 30 | this test. |
|
31 | 31 | $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \ |
|
32 | 32 | > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head |
|
33 | 33 | abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long. |
|
34 | 34 | |
|
35 | 35 | Missing arguments don't crash, and may print the command help |
|
36 | 36 | |
|
37 | 37 | $ hg debugcallconduit |
|
38 | 38 | hg debugcallconduit: invalid arguments |
|
39 | 39 | hg debugcallconduit METHOD |
|
40 | 40 | |
|
41 | 41 | call Conduit API |
|
42 | 42 | |
|
43 | 43 | options: |
|
44 | 44 | |
|
45 | 45 | (use 'hg debugcallconduit -h' to show more help) |
|
46 | 46 | [255] |
|
47 | 47 | $ hg phabread |
|
48 | 48 | abort: empty DREVSPEC set |
|
49 | 49 | [255] |
|
50 | 50 | |
|
51 | 51 | Basic phabread: |
|
52 | 52 | $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head |
|
53 | 53 | # HG changeset patch |
|
54 | 54 | # Date 1536771503 0 |
|
55 | 55 | # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a |
|
56 | 56 | exchangev2: start to implement pull with wire protocol v2 |
|
57 | 57 | |
|
58 | 58 | Wire protocol version 2 will take a substantially different |
|
59 | 59 | approach to exchange than version 1 (at least as far as pulling |
|
60 | 60 | is concerned). |
|
61 | 61 | |
|
62 | 62 | This commit establishes a new exchangev2 module for holding |
|
63 | 63 | |
|
64 | 64 | Phabread with multiple DREVSPEC |
|
65 | 65 | |
|
66 | 66 | TODO: attempt to order related revisions like --stack? |
|
67 | 67 | $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \ |
|
68 | 68 | > | grep '^Differential Revision' |
|
69 | 69 | Differential Revision: https://phab.mercurial-scm.org/D8205 |
|
70 | 70 | Differential Revision: https://phab.mercurial-scm.org/D8206 |
|
71 | 71 | Differential Revision: https://phab.mercurial-scm.org/D8207 |
|
72 | 72 | |
|
73 | 73 | Empty DREVSPECs don't crash |
|
74 | 74 | |
|
75 | 75 | $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917 |
|
76 | 76 | abort: empty DREVSPEC set |
|
77 | 77 | [255] |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | phabupdate with an accept: |
|
81 | 81 | $ hg phabupdate --accept D4564 \ |
|
82 | 82 | > -m 'I think I like where this is headed. Will read rest of series later.'\ |
|
83 | 83 | > --test-vcr "$VCR/accept-4564.json" |
|
84 | 84 | abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors: |
|
85 | 85 | - You can not accept this revision because it has already been closed. Only open revisions can be accepted. |
|
86 | 86 | [255] |
|
87 | 87 | $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json" |
|
88 | 88 | |
|
89 | phabupdate with --plan-changes: | |
|
90 | ||
|
91 | $ hg phabupdate --plan-changes D6876 --test-vcr "$VCR/phabupdate-change-6876.json" | |
|
92 | ||
|
89 | 93 | Create a differential diff: |
|
90 | 94 | $ HGENCODING=utf-8; export HGENCODING |
|
91 | 95 | $ echo alpha > alpha |
|
92 | 96 | $ hg ci --addremove -m 'create alpha for phabricator test β¬' |
|
93 | 97 | adding alpha |
|
94 | 98 | $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json" |
|
95 | 99 | D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc) |
|
96 | 100 | new commits: ['347bf67801e5'] |
|
97 | 101 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg |
|
98 | 102 | $ echo more >> alpha |
|
99 | 103 | $ HGEDITOR=true hg ci --amend |
|
100 | 104 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg |
|
101 | 105 | $ echo beta > beta |
|
102 | 106 | $ hg ci --addremove -m 'create beta for phabricator test' |
|
103 | 107 | adding beta |
|
104 | 108 | $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json" |
|
105 | 109 | c44b38f24a45 mapped to old nodes [] |
|
106 | 110 | D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc) |
|
107 | 111 | D7916 - created - 9e6901f21d5b: create beta for phabricator test |
|
108 | 112 | new commits: ['a692622e6937'] |
|
109 | 113 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg |
|
110 | 114 | $ unset HGENCODING |
|
111 | 115 | |
|
112 | 116 | The amend won't explode after posting a public commit. The local tag is left |
|
113 | 117 | behind to identify it. |
|
114 | 118 | |
|
115 | 119 | $ echo 'public change' > beta |
|
116 | 120 | $ hg ci -m 'create public change for phabricator testing' |
|
117 | 121 | $ hg phase --public . |
|
118 | 122 | $ echo 'draft change' > alpha |
|
119 | 123 | $ hg ci -m 'create draft change for phabricator testing' |
|
120 | 124 | $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json" |
|
121 | 125 | D7917 - created - 7b4185ab5d16: create public change for phabricator testing |
|
122 | 126 | D7918 - created - 251c1c333fc6: create draft change for phabricator testing |
|
123 | 127 | warning: not updating public commit 2:7b4185ab5d16 |
|
124 | 128 | new commits: ['3244dc4a3334'] |
|
125 | 129 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg |
|
126 | 130 | $ hg tags -v |
|
127 | 131 | tip 3:3244dc4a3334 |
|
128 | 132 | D7917 2:7b4185ab5d16 local |
|
129 | 133 | |
|
130 | 134 | $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF |
|
131 | 135 | > { |
|
132 | 136 | > "constraints": { |
|
133 | 137 | > "isBot": true |
|
134 | 138 | > } |
|
135 | 139 | > } |
|
136 | 140 | > EOF |
|
137 | 141 | { |
|
138 | 142 | "cursor": { |
|
139 | 143 | "after": null, |
|
140 | 144 | "before": null, |
|
141 | 145 | "limit": 100, |
|
142 | 146 | "order": null |
|
143 | 147 | }, |
|
144 | 148 | "data": [], |
|
145 | 149 | "maps": {}, |
|
146 | 150 | "query": { |
|
147 | 151 | "queryKey": null |
|
148 | 152 | } |
|
149 | 153 | } |
|
150 | 154 | |
|
151 | 155 | Template keywords |
|
152 | 156 | $ hg log -T'{rev} {phabreview|json}\n' |
|
153 | 157 | 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"} |
|
154 | 158 | 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"} |
|
155 | 159 | 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"} |
|
156 | 160 | 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"} |
|
157 | 161 | |
|
158 | 162 | $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' |
|
159 | 163 | 3 https://phab.mercurial-scm.org/D7918 D7918 |
|
160 | 164 | 2 https://phab.mercurial-scm.org/D7917 D7917 |
|
161 | 165 | 1 https://phab.mercurial-scm.org/D7916 D7916 |
|
162 | 166 | 0 https://phab.mercurial-scm.org/D7915 D7915 |
|
163 | 167 | |
|
164 | 168 | Commenting when phabsending: |
|
165 | 169 | $ echo comment > comment |
|
166 | 170 | $ hg ci --addremove -m "create comment for phabricator test" |
|
167 | 171 | adding comment |
|
168 | 172 | $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json" |
|
169 | 173 | D7919 - created - d5dddca9023d: create comment for phabricator test |
|
170 | 174 | new commits: ['f7db812bbe1d'] |
|
171 | 175 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg |
|
172 | 176 | $ echo comment2 >> comment |
|
173 | 177 | $ hg ci --amend |
|
174 | 178 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg |
|
175 | 179 | $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json" |
|
176 | 180 | 1849d7828727 mapped to old nodes [] |
|
177 | 181 | D7919 - updated - 1849d7828727: create comment for phabricator test |
|
178 | 182 | |
|
179 | 183 | Phabsending a skipped commit: |
|
180 | 184 | $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json" |
|
181 | 185 | 1849d7828727 mapped to old nodes ['1849d7828727'] |
|
182 | 186 | D7919 - skipped - 1849d7828727: create comment for phabricator test |
|
183 | 187 | |
|
184 | 188 | Phabsend doesn't create an instability when restacking existing revisions on top |
|
185 | 189 | of new revisions. |
|
186 | 190 | |
|
187 | 191 | $ hg init reorder |
|
188 | 192 | $ cd reorder |
|
189 | 193 | $ cat >> .hg/hgrc <<EOF |
|
190 | 194 | > [phabricator] |
|
191 | 195 | > url = https://phab.mercurial-scm.org/ |
|
192 | 196 | > callsign = HG |
|
193 | 197 | > [experimental] |
|
194 | 198 | > evolution = all |
|
195 | 199 | > EOF |
|
196 | 200 | |
|
197 | 201 | $ echo "add" > file1.txt |
|
198 | 202 | $ hg ci -Aqm 'added' |
|
199 | 203 | $ echo "mod1" > file1.txt |
|
200 | 204 | $ hg ci -m 'modified 1' |
|
201 | 205 | $ echo "mod2" > file1.txt |
|
202 | 206 | $ hg ci -m 'modified 2' |
|
203 | 207 | $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json" |
|
204 | 208 | D8433 - created - 5d3959e20d1d: modified 2 |
|
205 | 209 | new commits: ['2b4aa8a88d61'] |
|
206 | 210 | $ hg log -G -T compact |
|
207 | 211 | @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test |
|
208 | 212 | | modified 2 |
|
209 | 213 | | |
|
210 | 214 | o 1 d549263bcb2d 1970-01-01 00:00 +0000 test |
|
211 | 215 | | modified 1 |
|
212 | 216 | | |
|
213 | 217 | o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test |
|
214 | 218 | added |
|
215 | 219 | |
|
216 | 220 | Also check that it doesn't create more orphans outside of the stack |
|
217 | 221 | |
|
218 | 222 | $ hg up -q 1 |
|
219 | 223 | $ echo "mod3" > file1.txt |
|
220 | 224 | $ hg ci -m 'modified 3' |
|
221 | 225 | created new head |
|
222 | 226 | $ hg up -q 3 |
|
223 | 227 | $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json" |
|
224 | 228 | 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61'] |
|
225 | 229 | D8434 - created - d549263bcb2d: modified 1 |
|
226 | 230 | D8433 - updated - 2b4aa8a88d61: modified 2 |
|
227 | 231 | new commits: ['876a60d024de'] |
|
228 | 232 | new commits: ['0c6523cb1d0f'] |
|
229 | 233 | restabilizing 1eda4bf55021 as d2c78c3a3e01 |
|
230 | 234 | $ hg log -G -T compact |
|
231 | 235 | o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test |
|
232 | 236 | | modified 3 |
|
233 | 237 | | |
|
234 | 238 | | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test |
|
235 | 239 | |/ modified 2 |
|
236 | 240 | | |
|
237 | 241 | o 5:0 876a60d024de 1970-01-01 00:00 +0000 test |
|
238 | 242 | | modified 1 |
|
239 | 243 | | |
|
240 | 244 | o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test |
|
241 | 245 | added |
|
242 | 246 | |
|
243 | 247 | Posting obsolete commits is disallowed |
|
244 | 248 | |
|
245 | 249 | $ echo "mod3" > file1.txt |
|
246 | 250 | $ hg ci -m 'modified A' |
|
247 | 251 | $ echo "mod4" > file1.txt |
|
248 | 252 | $ hg ci -m 'modified B' |
|
249 | 253 | |
|
250 | 254 | $ hg up '.^' |
|
251 | 255 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
252 | 256 | $ echo 'obsolete' > file1.txt |
|
253 | 257 | $ hg amend --config extensions.amend= |
|
254 | 258 | 1 new orphan changesets |
|
255 | 259 | $ hg log -G |
|
256 | 260 | @ changeset: 10:082be6c94150 |
|
257 | 261 | | tag: tip |
|
258 | 262 | | parent: 6:0c6523cb1d0f |
|
259 | 263 | | user: test |
|
260 | 264 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
261 | 265 | | summary: modified A |
|
262 | 266 | | |
|
263 | 267 | | * changeset: 9:a67643f48146 |
|
264 | 268 | | | user: test |
|
265 | 269 | | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
266 | 270 | | | instability: orphan |
|
267 | 271 | | | summary: modified B |
|
268 | 272 | | | |
|
269 | 273 | | x changeset: 8:db79727cb2f7 |
|
270 | 274 | |/ parent: 6:0c6523cb1d0f |
|
271 | 275 | | user: test |
|
272 | 276 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
273 | 277 | | obsolete: rewritten using amend as 10:082be6c94150 |
|
274 | 278 | | summary: modified A |
|
275 | 279 | | |
|
276 | 280 | | o changeset: 7:d2c78c3a3e01 |
|
277 | 281 | | | parent: 5:876a60d024de |
|
278 | 282 | | | user: test |
|
279 | 283 | | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
280 | 284 | | | summary: modified 3 |
|
281 | 285 | | | |
|
282 | 286 | o | changeset: 6:0c6523cb1d0f |
|
283 | 287 | |/ user: test |
|
284 | 288 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
285 | 289 | | summary: modified 2 |
|
286 | 290 | | |
|
287 | 291 | o changeset: 5:876a60d024de |
|
288 | 292 | | parent: 0:5cbade24e0fa |
|
289 | 293 | | user: test |
|
290 | 294 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
291 | 295 | | summary: modified 1 |
|
292 | 296 | | |
|
293 | 297 | o changeset: 0:5cbade24e0fa |
|
294 | 298 | user: test |
|
295 | 299 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
296 | 300 | summary: added |
|
297 | 301 | |
|
298 | 302 | $ hg phabsend -r 5:: |
|
299 | 303 | abort: obsolete commits cannot be posted for review |
|
300 | 304 | [255] |
|
301 | 305 | |
|
302 | 306 | Don't restack existing orphans |
|
303 | 307 | |
|
304 | 308 | $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json" |
|
305 | 309 | 876a60d024de mapped to old nodes ['876a60d024de'] |
|
306 | 310 | 0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f'] |
|
307 | 311 | D8434 - updated - 876a60d024de: modified 1 |
|
308 | 312 | D8433 - updated - 0c6523cb1d0f: modified 2 |
|
309 | 313 | D8435 - created - 082be6c94150: modified A |
|
310 | 314 | new commits: ['b5913193c805'] |
|
311 | 315 | not restabilizing unchanged d2c78c3a3e01 |
|
312 | 316 | $ hg log -G |
|
313 | 317 | @ changeset: 11:b5913193c805 |
|
314 | 318 | | tag: tip |
|
315 | 319 | | parent: 6:0c6523cb1d0f |
|
316 | 320 | | user: test |
|
317 | 321 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
318 | 322 | | summary: modified A |
|
319 | 323 | | |
|
320 | 324 | | * changeset: 9:a67643f48146 |
|
321 | 325 | | | user: test |
|
322 | 326 | | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
323 | 327 | | | instability: orphan |
|
324 | 328 | | | summary: modified B |
|
325 | 329 | | | |
|
326 | 330 | | x changeset: 8:db79727cb2f7 |
|
327 | 331 | |/ parent: 6:0c6523cb1d0f |
|
328 | 332 | | user: test |
|
329 | 333 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
330 | 334 | | obsolete: rewritten using amend, phabsend as 11:b5913193c805 |
|
331 | 335 | | summary: modified A |
|
332 | 336 | | |
|
333 | 337 | | o changeset: 7:d2c78c3a3e01 |
|
334 | 338 | | | parent: 5:876a60d024de |
|
335 | 339 | | | user: test |
|
336 | 340 | | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
337 | 341 | | | summary: modified 3 |
|
338 | 342 | | | |
|
339 | 343 | o | changeset: 6:0c6523cb1d0f |
|
340 | 344 | |/ user: test |
|
341 | 345 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
342 | 346 | | summary: modified 2 |
|
343 | 347 | | |
|
344 | 348 | o changeset: 5:876a60d024de |
|
345 | 349 | | parent: 0:5cbade24e0fa |
|
346 | 350 | | user: test |
|
347 | 351 | | date: Thu Jan 01 00:00:00 1970 +0000 |
|
348 | 352 | | summary: modified 1 |
|
349 | 353 | | |
|
350 | 354 | o changeset: 0:5cbade24e0fa |
|
351 | 355 | user: test |
|
352 | 356 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
353 | 357 | summary: added |
|
354 | 358 | |
|
355 | 359 | $ cd .. |
|
356 | 360 | |
|
357 | 361 | Phabesending a new binary, a modified binary, and a removed binary |
|
358 | 362 | |
|
359 | 363 | >>> open('bin', 'wb').write(b'\0a') and None |
|
360 | 364 | $ hg ci -Am 'add binary' |
|
361 | 365 | adding bin |
|
362 | 366 | >>> open('bin', 'wb').write(b'\0b') and None |
|
363 | 367 | $ hg ci -m 'modify binary' |
|
364 | 368 | $ hg rm bin |
|
365 | 369 | $ hg ci -m 'remove binary' |
|
366 | 370 | $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json" |
|
367 | 371 | uploading bin@aa24a81f55de |
|
368 | 372 | D8007 - created - aa24a81f55de: add binary |
|
369 | 373 | uploading bin@d8d62a881b54 |
|
370 | 374 | D8008 - created - d8d62a881b54: modify binary |
|
371 | 375 | D8009 - created - af55645b2e29: remove binary |
|
372 | 376 | new commits: ['b8139fbb4a57'] |
|
373 | 377 | new commits: ['c88ce4c2d2ad'] |
|
374 | 378 | new commits: ['75dbbc901145'] |
|
375 | 379 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg |
|
376 | 380 | |
|
377 | 381 | Phabsend a renamed binary and a copied binary, with and without content changes |
|
378 | 382 | to src and dest |
|
379 | 383 | |
|
380 | 384 | >>> open('bin2', 'wb').write(b'\0c') and None |
|
381 | 385 | $ hg ci -Am 'add another binary' |
|
382 | 386 | adding bin2 |
|
383 | 387 | |
|
384 | 388 | TODO: "bin2" can't be viewed in this commit (left or right side), and the URL |
|
385 | 389 | looks much different than when viewing "bin2_moved". No idea if this is a phab |
|
386 | 390 | bug, or phabsend bug. The patch (as printed by phabread) look reasonable |
|
387 | 391 | though. |
|
388 | 392 | |
|
389 | 393 | $ hg mv bin2 bin2_moved |
|
390 | 394 | $ hg ci -m "moved binary" |
|
391 | 395 | |
|
392 | 396 | Note: "bin2_moved" is also not viewable in phabricator with this review |
|
393 | 397 | |
|
394 | 398 | $ hg cp bin2_moved bin2_copied |
|
395 | 399 | $ hg ci -m "copied binary" |
|
396 | 400 | |
|
397 | 401 | Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it |
|
398 | 402 | are viewable in their proper state. "bin2_copied" is not viewable, and not |
|
399 | 403 | listed as binary in phabricator. |
|
400 | 404 | |
|
401 | 405 | >>> open('bin2_copied', 'wb').write(b'\0move+mod') and None |
|
402 | 406 | $ hg mv bin2_copied bin2_moved_again |
|
403 | 407 | $ hg ci -m "move+mod copied binary" |
|
404 | 408 | |
|
405 | 409 | Note: "bin2_moved" and "bin2_moved_copy" are both marked binary, and both |
|
406 | 410 | viewable on each side. |
|
407 | 411 | |
|
408 | 412 | >>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None |
|
409 | 413 | $ hg cp bin2_moved bin2_moved_copied |
|
410 | 414 | >>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None |
|
411 | 415 | $ hg ci -m "copy+mod moved binary" |
|
412 | 416 | |
|
413 | 417 | $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json" |
|
414 | 418 | uploading bin2@f42f9195e00c |
|
415 | 419 | D8128 - created - f42f9195e00c: add another binary |
|
416 | 420 | D8129 - created - 834ab31d80ae: moved binary |
|
417 | 421 | D8130 - created - 494b750e5194: copied binary |
|
418 | 422 | uploading bin2_moved_again@25f766b50cc2 |
|
419 | 423 | D8131 - created - 25f766b50cc2: move+mod copied binary |
|
420 | 424 | uploading bin2_moved_copied@1b87b363a5e4 |
|
421 | 425 | uploading bin2_moved@1b87b363a5e4 |
|
422 | 426 | D8132 - created - 1b87b363a5e4: copy+mod moved binary |
|
423 | 427 | new commits: ['90437c20312a'] |
|
424 | 428 | new commits: ['f391f4da4c61'] |
|
425 | 429 | new commits: ['da86a9f3268c'] |
|
426 | 430 | new commits: ['003ffc16ba66'] |
|
427 | 431 | new commits: ['13bd750c36fa'] |
|
428 | 432 | saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg |
|
429 | 433 | |
|
430 | 434 | Phabreading a DREV with a local:commits time as a string: |
|
431 | 435 | $ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285 |
|
432 | 436 | # HG changeset patch |
|
433 | 437 | # User Pulkit Goyal <7895pulkit@gmail.com> |
|
434 | 438 | # Date 1509404054 -19800 |
|
435 | 439 | # Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5 |
|
436 | 440 | # Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35 |
|
437 | 441 | repoview: add a new attribute _visibilityexceptions and related API |
|
438 | 442 | |
|
439 | 443 | Currently we don't have a defined way in core to make some hidden revisions |
|
440 | 444 | visible in filtered repo. Extensions to achieve the purpose of unhiding some |
|
441 | 445 | hidden commits, wrap repoview.pinnedrevs() function. |
|
442 | 446 | |
|
443 | 447 | To make the above task simple and have well defined API, this patch adds a new |
|
444 | 448 | attribute '_visibilityexceptions' to repoview class which will contains |
|
445 | 449 | the hidden revs which should be exception. |
|
446 | 450 | This will allow to set different exceptions for different repoview objects |
|
447 | 451 | backed by the same unfiltered repo. |
|
448 | 452 | |
|
449 | 453 | This patch also adds API to add revs to the attribute set and get them. |
|
450 | 454 | |
|
451 | 455 | Thanks to Jun for suggesting the use of repoview class instead of localrepo. |
|
452 | 456 | |
|
453 | 457 | Differential Revision: https://phab.mercurial-scm.org/D1285 |
|
454 | 458 | diff --git a/mercurial/repoview.py b/mercurial/repoview.py |
|
455 | 459 | --- a/mercurial/repoview.py |
|
456 | 460 | +++ b/mercurial/repoview.py |
|
457 | 461 | @@ * @@ (glob) |
|
458 | 462 | subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`. |
|
459 | 463 | """ |
|
460 | 464 | |
|
461 | 465 | + # hidden revs which should be visible |
|
462 | 466 | + _visibilityexceptions = set() |
|
463 | 467 | + |
|
464 | 468 | def __init__(self, repo, filtername): |
|
465 | 469 | object.__setattr__(self, r'_unfilteredrepo', repo) |
|
466 | 470 | object.__setattr__(self, r'filtername', filtername) |
|
467 | 471 | @@ -231,6 +234,14 @@ |
|
468 | 472 | return self |
|
469 | 473 | return self.unfiltered().filtered(name) |
|
470 | 474 | |
|
471 | 475 | + def addvisibilityexceptions(self, revs): |
|
472 | 476 | + """adds hidden revs which should be visible to set of exceptions""" |
|
473 | 477 | + self._visibilityexceptions.update(revs) |
|
474 | 478 | + |
|
475 | 479 | + def getvisibilityexceptions(self): |
|
476 | 480 | + """returns the set of hidden revs which should be visible""" |
|
477 | 481 | + return self._visibilityexceptions |
|
478 | 482 | + |
|
479 | 483 | # everything access are forwarded to the proxied repo |
|
480 | 484 | def __getattr__(self, attr): |
|
481 | 485 | return getattr(self._unfilteredrepo, attr) |
|
482 | 486 | diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py |
|
483 | 487 | --- a/mercurial/localrepo.py |
|
484 | 488 | +++ b/mercurial/localrepo.py |
|
485 | 489 | @@ -570,6 +570,14 @@ |
|
486 | 490 | def close(self): |
|
487 | 491 | self._writecaches() |
|
488 | 492 | |
|
489 | 493 | + def addvisibilityexceptions(self, exceptions): |
|
490 | 494 | + # should be called on a filtered repository |
|
491 | 495 | + pass |
|
492 | 496 | + |
|
493 | 497 | + def getvisibilityexceptions(self): |
|
494 | 498 | + # should be called on a filtered repository |
|
495 | 499 | + return set() |
|
496 | 500 | + |
|
497 | 501 | def _loadextensions(self): |
|
498 | 502 | extensions.loadall(self.ui) |
|
499 | 503 | |
|
500 | 504 | |
|
501 | 505 | A bad .arcconfig doesn't error out |
|
502 | 506 | $ echo 'garbage' > .arcconfig |
|
503 | 507 | $ hg config phabricator --debug |
|
504 | 508 | invalid JSON in $TESTTMP/repo/.arcconfig |
|
505 | 509 | read config from: */.hgrc (glob) |
|
506 | 510 | */.hgrc:*: phabricator.debug=True (glob) |
|
507 | 511 | $TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob) |
|
508 | 512 | $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob) |
|
509 | 513 | |
|
510 | 514 | The .arcconfig content overrides global config |
|
511 | 515 | $ cat >> $HGRCPATH << EOF |
|
512 | 516 | > [phabricator] |
|
513 | 517 | > url = global |
|
514 | 518 | > callsign = global |
|
515 | 519 | > EOF |
|
516 | 520 | $ cp $TESTDIR/../.arcconfig . |
|
517 | 521 | $ mv .hg/hgrc .hg/hgrc.bak |
|
518 | 522 | $ hg config phabricator --debug |
|
519 | 523 | read config from: */.hgrc (glob) |
|
520 | 524 | */.hgrc:*: phabricator.debug=True (glob) |
|
521 | 525 | $TESTTMP/repo/.arcconfig: phabricator.callsign=HG |
|
522 | 526 | $TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/ |
|
523 | 527 | |
|
524 | 528 | But it doesn't override local config |
|
525 | 529 | $ cat >> .hg/hgrc << EOF |
|
526 | 530 | > [phabricator] |
|
527 | 531 | > url = local |
|
528 | 532 | > callsign = local |
|
529 | 533 | > EOF |
|
530 | 534 | $ hg config phabricator --debug |
|
531 | 535 | read config from: */.hgrc (glob) |
|
532 | 536 | */.hgrc:*: phabricator.debug=True (glob) |
|
533 | 537 | $TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob) |
|
534 | 538 | $TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob) |
|
535 | 539 | $ mv .hg/hgrc.bak .hg/hgrc |
|
536 | 540 | |
|
537 | 541 | Phabimport works with a stack |
|
538 | 542 | |
|
539 | 543 | $ cd .. |
|
540 | 544 | $ hg clone repo repo2 -qr 1 |
|
541 | 545 | $ cp repo/.hg/hgrc repo2/.hg/ |
|
542 | 546 | $ cd repo2 |
|
543 | 547 | $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" |
|
544 | 548 | applying patch from D7917 |
|
545 | 549 | applying patch from D7918 |
|
546 | 550 | $ hg log -r .: -G -Tcompact |
|
547 | 551 | o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test |
|
548 | 552 | | create draft change for phabricator testing |
|
549 | 553 | | |
|
550 | 554 | o 2 8de3712202d1 1970-01-01 00:00 +0000 test |
|
551 | 555 | | create public change for phabricator testing |
|
552 | 556 | | |
|
553 | 557 | @ 1 a692622e6937 1970-01-01 00:00 +0000 test |
|
554 | 558 | | create beta for phabricator test |
|
555 | 559 | ~ |
|
556 | 560 | Phabimport can create secret commits |
|
557 | 561 | |
|
558 | 562 | $ hg rollback --config ui.rollback=True |
|
559 | 563 | repository tip rolled back to revision 1 (undo phabimport) |
|
560 | 564 | $ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \ |
|
561 | 565 | > --config phabimport.secret=True |
|
562 | 566 | applying patch from D7917 |
|
563 | 567 | applying patch from D7918 |
|
564 | 568 | $ hg log -r 'reverse(.:)' -T phases |
|
565 | 569 | changeset: 3:aaef04066140 |
|
566 | 570 | tag: tip |
|
567 | 571 | phase: secret |
|
568 | 572 | user: test |
|
569 | 573 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
570 | 574 | summary: create draft change for phabricator testing |
|
571 | 575 | |
|
572 | 576 | changeset: 2:8de3712202d1 |
|
573 | 577 | phase: secret |
|
574 | 578 | user: test |
|
575 | 579 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
576 | 580 | summary: create public change for phabricator testing |
|
577 | 581 | |
|
578 | 582 | changeset: 1:a692622e6937 |
|
579 | 583 | phase: public |
|
580 | 584 | user: test |
|
581 | 585 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
582 | 586 | summary: create beta for phabricator test |
|
583 | 587 | |
|
584 | 588 | Phabimport accepts multiple DREVSPECs |
|
585 | 589 | |
|
586 | 590 | $ hg rollback --config ui.rollback=True |
|
587 | 591 | repository tip rolled back to revision 1 (undo phabimport) |
|
588 | 592 | $ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json" |
|
589 | 593 | applying patch from D7917 |
|
590 | 594 | applying patch from D7918 |
|
591 | 595 | |
|
592 | 596 | Phabsend requires a linear range of commits |
|
593 | 597 | |
|
594 | 598 | $ hg phabsend -r 0+2+3 |
|
595 | 599 | abort: cannot phabsend multiple head revisions: c44b38f24a45 |
|
596 | 600 | (the revisions must form a linear chain) |
|
597 | 601 | [255] |
|
598 | 602 | |
|
599 | 603 | Validate arguments with --fold |
|
600 | 604 | |
|
601 | 605 | $ hg phabsend --fold -r 1 |
|
602 | 606 | abort: cannot fold a single revision |
|
603 | 607 | [255] |
|
604 | 608 | $ hg phabsend --fold --no-amend -r 1:: |
|
605 | 609 | abort: cannot fold with --no-amend |
|
606 | 610 | [255] |
|
607 | 611 | $ hg phabsend --fold -r 1:: |
|
608 | 612 | abort: cannot fold revisions with different DREV values |
|
609 | 613 | [255] |
|
610 | 614 | |
|
611 | 615 | Setup a series of commits to be folded, and include the Test Plan field multiple |
|
612 | 616 | times to test the concatenation logic. No Test Plan field in the last one to |
|
613 | 617 | ensure missing fields are skipped. |
|
614 | 618 | |
|
615 | 619 | $ hg init ../folded |
|
616 | 620 | $ cd ../folded |
|
617 | 621 | $ cat >> .hg/hgrc <<EOF |
|
618 | 622 | > [phabricator] |
|
619 | 623 | > url = https://phab.mercurial-scm.org/ |
|
620 | 624 | > callsign = HG |
|
621 | 625 | > EOF |
|
622 | 626 | |
|
623 | 627 | $ echo 'added' > file.txt |
|
624 | 628 | $ hg ci -Aqm 'added file' |
|
625 | 629 | |
|
626 | 630 | $ cat > log.txt <<EOF |
|
627 | 631 | > one: first commit to review |
|
628 | 632 | > |
|
629 | 633 | > This file was modified with 'mod1' as its contents. |
|
630 | 634 | > |
|
631 | 635 | > Test Plan: |
|
632 | 636 | > LOL! What testing?! |
|
633 | 637 | > EOF |
|
634 | 638 | $ echo mod1 > file.txt |
|
635 | 639 | $ hg ci -l log.txt |
|
636 | 640 | |
|
637 | 641 | $ cat > log.txt <<EOF |
|
638 | 642 | > two: second commit to review |
|
639 | 643 | > |
|
640 | 644 | > This file was modified with 'mod2' as its contents. |
|
641 | 645 | > |
|
642 | 646 | > Test Plan: |
|
643 | 647 | > Haha! yeah, right. |
|
644 | 648 | > |
|
645 | 649 | > EOF |
|
646 | 650 | $ echo mod2 > file.txt |
|
647 | 651 | $ hg ci -l log.txt |
|
648 | 652 | |
|
649 | 653 | $ echo mod3 > file.txt |
|
650 | 654 | $ hg ci -m '3: a commit with no detailed message' |
|
651 | 655 | |
|
652 | 656 | The folding of immutable commits works... |
|
653 | 657 | |
|
654 | 658 | $ hg phase -r tip --public |
|
655 | 659 | $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json" |
|
656 | 660 | D8386 - created - a959a3f69d8d: one: first commit to review |
|
657 | 661 | D8386 - created - 24a4438154ba: two: second commit to review |
|
658 | 662 | D8386 - created - d235829e802c: 3: a commit with no detailed message |
|
659 | 663 | warning: not updating public commit 1:a959a3f69d8d |
|
660 | 664 | warning: not updating public commit 2:24a4438154ba |
|
661 | 665 | warning: not updating public commit 3:d235829e802c |
|
662 | 666 | no newnodes to update |
|
663 | 667 | |
|
664 | 668 | $ hg phase -r 0 --draft --force |
|
665 | 669 | |
|
666 | 670 | ... as does the initial mutable fold... |
|
667 | 671 | |
|
668 | 672 | $ echo y | hg phabsend --fold --confirm -r 1:: \ |
|
669 | 673 | > --test-vcr "$VCR/phabsend-fold-initial.json" |
|
670 | 674 | NEW - a959a3f69d8d: one: first commit to review |
|
671 | 675 | NEW - 24a4438154ba: two: second commit to review |
|
672 | 676 | NEW - d235829e802c: 3: a commit with no detailed message |
|
673 | 677 | Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y |
|
674 | 678 | D8387 - created - a959a3f69d8d: one: first commit to review |
|
675 | 679 | D8387 - created - 24a4438154ba: two: second commit to review |
|
676 | 680 | D8387 - created - d235829e802c: 3: a commit with no detailed message |
|
677 | 681 | updating local commit list for D8387 |
|
678 | 682 | new commits: ['602c4e738243', '832553266fe8', '921f8265efbd'] |
|
679 | 683 | saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg |
|
680 | 684 | |
|
681 | 685 | ... and doesn't mangle the local commits. |
|
682 | 686 | |
|
683 | 687 | $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n' |
|
684 | 688 | 3:921f8265efbd |
|
685 | 689 | 3: a commit with no detailed message |
|
686 | 690 | |
|
687 | 691 | Differential Revision: https://phab.mercurial-scm.org/D8387 |
|
688 | 692 | 2:832553266fe8 |
|
689 | 693 | two: second commit to review |
|
690 | 694 | |
|
691 | 695 | This file was modified with 'mod2' as its contents. |
|
692 | 696 | |
|
693 | 697 | Test Plan: |
|
694 | 698 | Haha! yeah, right. |
|
695 | 699 | |
|
696 | 700 | Differential Revision: https://phab.mercurial-scm.org/D8387 |
|
697 | 701 | 1:602c4e738243 |
|
698 | 702 | one: first commit to review |
|
699 | 703 | |
|
700 | 704 | This file was modified with 'mod1' as its contents. |
|
701 | 705 | |
|
702 | 706 | Test Plan: |
|
703 | 707 | LOL! What testing?! |
|
704 | 708 | |
|
705 | 709 | Differential Revision: https://phab.mercurial-scm.org/D8387 |
|
706 | 710 | 0:98d480e0d494 |
|
707 | 711 | added file |
|
708 | 712 | |
|
709 | 713 | Setup some obsmarkers by adding a file to the middle commit. This stress tests |
|
710 | 714 | getoldnodedrevmap() in later phabsends. |
|
711 | 715 | |
|
712 | 716 | $ hg up '.^' |
|
713 | 717 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
714 | 718 | $ echo 'modified' > file2.txt |
|
715 | 719 | $ hg add file2.txt |
|
716 | 720 | $ hg amend --config experimental.evolution=all --config extensions.amend= |
|
717 | 721 | 1 new orphan changesets |
|
718 | 722 | $ hg up 3 |
|
719 | 723 | obsolete feature not enabled but 1 markers found! |
|
720 | 724 | 1 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
721 | 725 | $ hg rebase --config experimental.evolution=all --config extensions.rebase= |
|
722 | 726 | note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip) |
|
723 | 727 | rebasing 3:921f8265efbd "3: a commit with no detailed message" |
|
724 | 728 | |
|
725 | 729 | When commits have changed locally, the local commit list on Phabricator is |
|
726 | 730 | updated. |
|
727 | 731 | |
|
728 | 732 | $ echo y | hg phabsend --fold --confirm -r 1:: \ |
|
729 | 733 | > --test-vcr "$VCR/phabsend-fold-updated.json" |
|
730 | 734 | obsolete feature not enabled but 2 markers found! |
|
731 | 735 | 602c4e738243 mapped to old nodes ['602c4e738243'] |
|
732 | 736 | 0124e5474c88 mapped to old nodes ['832553266fe8'] |
|
733 | 737 | e4edb1fe3565 mapped to old nodes ['921f8265efbd'] |
|
734 | 738 | D8387 - 602c4e738243: one: first commit to review |
|
735 | 739 | D8387 - 0124e5474c88: two: second commit to review |
|
736 | 740 | D8387 - e4edb1fe3565: 3: a commit with no detailed message |
|
737 | 741 | Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y |
|
738 | 742 | D8387 - updated - 602c4e738243: one: first commit to review |
|
739 | 743 | D8387 - updated - 0124e5474c88: two: second commit to review |
|
740 | 744 | D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message |
|
741 | 745 | obsolete feature not enabled but 2 markers found! (?) |
|
742 | 746 | updating local commit list for D8387 |
|
743 | 747 | new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565'] |
|
744 | 748 | $ hg log -Tcompact |
|
745 | 749 | obsolete feature not enabled but 2 markers found! |
|
746 | 750 | 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test |
|
747 | 751 | 3: a commit with no detailed message |
|
748 | 752 | |
|
749 | 753 | 4:1 0124e5474c88 1970-01-01 00:00 +0000 test |
|
750 | 754 | two: second commit to review |
|
751 | 755 | |
|
752 | 756 | 1 602c4e738243 1970-01-01 00:00 +0000 test |
|
753 | 757 | one: first commit to review |
|
754 | 758 | |
|
755 | 759 | 0 98d480e0d494 1970-01-01 00:00 +0000 test |
|
756 | 760 | added file |
|
757 | 761 | |
|
758 | 762 | When nothing has changed locally since the last phabsend, the commit list isn't |
|
759 | 763 | updated, and nothing is changed locally afterward. |
|
760 | 764 | |
|
761 | 765 | $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json" |
|
762 | 766 | obsolete feature not enabled but 2 markers found! |
|
763 | 767 | 602c4e738243 mapped to old nodes ['602c4e738243'] |
|
764 | 768 | 0124e5474c88 mapped to old nodes ['0124e5474c88'] |
|
765 | 769 | e4edb1fe3565 mapped to old nodes ['e4edb1fe3565'] |
|
766 | 770 | D8387 - updated - 602c4e738243: one: first commit to review |
|
767 | 771 | D8387 - updated - 0124e5474c88: two: second commit to review |
|
768 | 772 | D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message |
|
769 | 773 | obsolete feature not enabled but 2 markers found! (?) |
|
770 | 774 | local commit list for D8387 is already up-to-date |
|
771 | 775 | $ hg log -Tcompact |
|
772 | 776 | obsolete feature not enabled but 2 markers found! |
|
773 | 777 | 5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test |
|
774 | 778 | 3: a commit with no detailed message |
|
775 | 779 | |
|
776 | 780 | 4:1 0124e5474c88 1970-01-01 00:00 +0000 test |
|
777 | 781 | two: second commit to review |
|
778 | 782 | |
|
779 | 783 | 1 602c4e738243 1970-01-01 00:00 +0000 test |
|
780 | 784 | one: first commit to review |
|
781 | 785 | |
|
782 | 786 | 0 98d480e0d494 1970-01-01 00:00 +0000 test |
|
783 | 787 | added file |
|
784 | 788 | |
|
785 | 789 | Fold will accept new revisions at the end... |
|
786 | 790 | |
|
787 | 791 | $ echo 'another mod' > file2.txt |
|
788 | 792 | $ hg ci -m 'four: extend the fold range' |
|
789 | 793 | obsolete feature not enabled but 2 markers found! |
|
790 | 794 | $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \ |
|
791 | 795 | > --config experimental.evolution=all |
|
792 | 796 | 602c4e738243 mapped to old nodes ['602c4e738243'] |
|
793 | 797 | 0124e5474c88 mapped to old nodes ['0124e5474c88'] |
|
794 | 798 | e4edb1fe3565 mapped to old nodes ['e4edb1fe3565'] |
|
795 | 799 | D8387 - updated - 602c4e738243: one: first commit to review |
|
796 | 800 | D8387 - updated - 0124e5474c88: two: second commit to review |
|
797 | 801 | D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message |
|
798 | 802 | D8387 - created - 94aaae213b23: four: extend the fold range |
|
799 | 803 | updating local commit list for D8387 |
|
800 | 804 | new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707'] |
|
801 | 805 | $ hg log -r . -T '{desc}\n' |
|
802 | 806 | four: extend the fold range |
|
803 | 807 | |
|
804 | 808 | Differential Revision: https://phab.mercurial-scm.org/D8387 |
|
805 | 809 | $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1:: |
|
806 | 810 | obsolete feature not enabled but 3 markers found! |
|
807 | 811 | 1 https://phab.mercurial-scm.org/D8387 D8387 |
|
808 | 812 | 4 https://phab.mercurial-scm.org/D8387 D8387 |
|
809 | 813 | 5 https://phab.mercurial-scm.org/D8387 D8387 |
|
810 | 814 | 7 https://phab.mercurial-scm.org/D8387 D8387 |
|
811 | 815 | |
|
812 | 816 | ... and also accepts new revisions at the beginning of the range |
|
813 | 817 | |
|
814 | 818 | It's a bit unfortunate that not having a Differential URL on the first commit |
|
815 | 819 | causes a new Differential Revision to be created, though it isn't *entirely* |
|
816 | 820 | unreasonable. At least this updates the subsequent commits. |
|
817 | 821 | |
|
818 | 822 | TODO: See if it can reuse the existing Differential. |
|
819 | 823 | |
|
820 | 824 | $ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \ |
|
821 | 825 | > --config experimental.evolution=all |
|
822 | 826 | 602c4e738243 mapped to old nodes ['602c4e738243'] |
|
823 | 827 | 0124e5474c88 mapped to old nodes ['0124e5474c88'] |
|
824 | 828 | e4edb1fe3565 mapped to old nodes ['e4edb1fe3565'] |
|
825 | 829 | 51a04fea8707 mapped to old nodes ['51a04fea8707'] |
|
826 | 830 | D8388 - created - 98d480e0d494: added file |
|
827 | 831 | D8388 - updated - 602c4e738243: one: first commit to review |
|
828 | 832 | D8388 - updated - 0124e5474c88: two: second commit to review |
|
829 | 833 | D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message |
|
830 | 834 | D8388 - updated - 51a04fea8707: four: extend the fold range |
|
831 | 835 | updating local commit list for D8388 |
|
832 | 836 | new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991'] |
|
833 | 837 | |
|
834 | 838 | $ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n' |
|
835 | 839 | obsolete feature not enabled but 8 markers found! |
|
836 | 840 | 12:ac7db67f0991 |
|
837 | 841 | four: extend the fold range |
|
838 | 842 | |
|
839 | 843 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
840 | 844 | 11:30682b960804 |
|
841 | 845 | 3: a commit with no detailed message |
|
842 | 846 | |
|
843 | 847 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
844 | 848 | 10:3ee132d41dbc |
|
845 | 849 | two: second commit to review |
|
846 | 850 | |
|
847 | 851 | This file was modified with 'mod2' as its contents. |
|
848 | 852 | |
|
849 | 853 | Test Plan: |
|
850 | 854 | Haha! yeah, right. |
|
851 | 855 | |
|
852 | 856 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
853 | 857 | 9:6320b7d714cf |
|
854 | 858 | one: first commit to review |
|
855 | 859 | |
|
856 | 860 | This file was modified with 'mod1' as its contents. |
|
857 | 861 | |
|
858 | 862 | Test Plan: |
|
859 | 863 | LOL! What testing?! |
|
860 | 864 | |
|
861 | 865 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
862 | 866 | 8:15e9b14b4b4c |
|
863 | 867 | added file |
|
864 | 868 | |
|
865 | 869 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
866 | 870 | |
|
867 | 871 | Test phabsend --fold with an `hg split` at the end of the range |
|
868 | 872 | |
|
869 | 873 | $ echo foo > file3.txt |
|
870 | 874 | $ hg add file3.txt |
|
871 | 875 | |
|
872 | 876 | $ hg log -r . -T '{desc}' > log.txt |
|
873 | 877 | $ echo 'amended mod' > file2.txt |
|
874 | 878 | $ hg ci --amend -l log.txt --config experimental.evolution=all |
|
875 | 879 | |
|
876 | 880 | $ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \ |
|
877 | 881 | > --config experimental.evolution=all split -r . |
|
878 | 882 | > n |
|
879 | 883 | > y |
|
880 | 884 | > y |
|
881 | 885 | > y |
|
882 | 886 | > y |
|
883 | 887 | > EOF |
|
884 | 888 | diff --git a/file2.txt b/file2.txt |
|
885 | 889 | 1 hunks, 1 lines changed |
|
886 | 890 | examine changes to 'file2.txt'? |
|
887 | 891 | (enter ? for help) [Ynesfdaq?] n |
|
888 | 892 | |
|
889 | 893 | diff --git a/file3.txt b/file3.txt |
|
890 | 894 | new file mode 100644 |
|
891 | 895 | examine changes to 'file3.txt'? |
|
892 | 896 | (enter ? for help) [Ynesfdaq?] y |
|
893 | 897 | |
|
894 | 898 | @@ -0,0 +1,1 @@ |
|
895 | 899 | +foo |
|
896 | 900 | record change 2/2 to 'file3.txt'? |
|
897 | 901 | (enter ? for help) [Ynesfdaq?] y |
|
898 | 902 | |
|
899 | 903 | created new head |
|
900 | 904 | diff --git a/file2.txt b/file2.txt |
|
901 | 905 | 1 hunks, 1 lines changed |
|
902 | 906 | examine changes to 'file2.txt'? |
|
903 | 907 | (enter ? for help) [Ynesfdaq?] y |
|
904 | 908 | |
|
905 | 909 | @@ -1,1 +1,1 @@ |
|
906 | 910 | -modified |
|
907 | 911 | +amended mod |
|
908 | 912 | record this change to 'file2.txt'? |
|
909 | 913 | (enter ? for help) [Ynesfdaq?] y |
|
910 | 914 | |
|
911 | 915 | $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \ |
|
912 | 916 | > --config experimental.evolution=all |
|
913 | 917 | 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c'] |
|
914 | 918 | 6320b7d714cf mapped to old nodes ['6320b7d714cf'] |
|
915 | 919 | 3ee132d41dbc mapped to old nodes ['3ee132d41dbc'] |
|
916 | 920 | 30682b960804 mapped to old nodes ['30682b960804'] |
|
917 | 921 | 6bc15dc99efd mapped to old nodes ['ac7db67f0991'] |
|
918 | 922 | b50946d5e490 mapped to old nodes ['ac7db67f0991'] |
|
919 | 923 | D8388 - updated - 15e9b14b4b4c: added file |
|
920 | 924 | D8388 - updated - 6320b7d714cf: one: first commit to review |
|
921 | 925 | D8388 - updated - 3ee132d41dbc: two: second commit to review |
|
922 | 926 | D8388 - updated - 30682b960804: 3: a commit with no detailed message |
|
923 | 927 | D8388 - updated - 6bc15dc99efd: four: extend the fold range |
|
924 | 928 | D8388 - updated - b50946d5e490: four: extend the fold range |
|
925 | 929 | updating local commit list for D8388 |
|
926 | 930 | new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490'] |
|
927 | 931 | |
|
928 | 932 | Test phabsend --fold with an `hg fold` at the end of the range |
|
929 | 933 | |
|
930 | 934 | $ hg --config experimental.evolution=all --config extensions.rebase= \ |
|
931 | 935 | > rebase -r '.^' -r . -d '.^^' --collapse -l log.txt |
|
932 | 936 | rebasing 14:6bc15dc99efd "four: extend the fold range" |
|
933 | 937 | rebasing 15:b50946d5e490 "four: extend the fold range" (tip) |
|
934 | 938 | |
|
935 | 939 | $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \ |
|
936 | 940 | > --config experimental.evolution=all |
|
937 | 941 | 15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c'] |
|
938 | 942 | 6320b7d714cf mapped to old nodes ['6320b7d714cf'] |
|
939 | 943 | 3ee132d41dbc mapped to old nodes ['3ee132d41dbc'] |
|
940 | 944 | 30682b960804 mapped to old nodes ['30682b960804'] |
|
941 | 945 | e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490'] |
|
942 | 946 | D8388 - updated - 15e9b14b4b4c: added file |
|
943 | 947 | D8388 - updated - 6320b7d714cf: one: first commit to review |
|
944 | 948 | D8388 - updated - 3ee132d41dbc: two: second commit to review |
|
945 | 949 | D8388 - updated - 30682b960804: 3: a commit with no detailed message |
|
946 | 950 | D8388 - updated - e919cdf3d4fe: four: extend the fold range |
|
947 | 951 | updating local commit list for D8388 |
|
948 | 952 | new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe'] |
|
949 | 953 | |
|
950 | 954 | $ hg log -r tip -v |
|
951 | 955 | obsolete feature not enabled but 12 markers found! |
|
952 | 956 | changeset: 16:e919cdf3d4fe |
|
953 | 957 | tag: tip |
|
954 | 958 | parent: 11:30682b960804 |
|
955 | 959 | user: test |
|
956 | 960 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
957 | 961 | files: file2.txt file3.txt |
|
958 | 962 | description: |
|
959 | 963 | four: extend the fold range |
|
960 | 964 | |
|
961 | 965 | Differential Revision: https://phab.mercurial-scm.org/D8388 |
|
962 | 966 | |
|
963 | 967 | |
|
964 | 968 | |
|
965 | 969 | $ cd .. |
General Comments 0
You need to be logged in to leave comments.
Login now