@@ -1,1895 +1,1900 @@
# phabricator.py - simple Phabricator integration
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""simple Phabricator integration (EXPERIMENTAL)

This extension provides a ``phabsend`` command which sends a stack of
changesets to Phabricator, and a ``phabread`` command which prints a stack of
revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
to update statuses in batch.

A "phabstatus" view for :hg:`show` is also provided; it displays status
information of Phabricator differentials associated with unfinished
changesets.

By default, Phabricator requires ``Test Plan`` which might prevent some
changesets from being sent. The requirement can be disabled by changing the
``differential.require-test-plan-field`` config server side.

Config::

    [phabricator]
    # Phabricator URL
    url = https://phab.example.com/

    # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
    # callsign is "FOO".
    callsign = FOO

    # curl command to use. If not set (default), use the builtin HTTP library
    # to communicate. If set, use the specified curl command. This could be
    # useful if you need to specify advanced options that are not easily
    # supported by the internal library.
    curlcmd = curl --connect-timeout 2 --retry 3 --silent

    [auth]
    example.schemes = https
    example.prefix = phab.example.com

    # API token. Get it from https://$HOST/conduit/login/
    example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
"""

from __future__ import absolute_import

import base64
import contextlib
import hashlib
import itertools
import json
import mimetypes
import operator
import re

from mercurial.node import bin, nullid
from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.thirdparty import attr
from mercurial import (
    cmdutil,
    context,
    encoding,
    error,
    exthelper,
    graphmod,
    httpconnection as httpconnectionmod,
    localrepo,
    logcmdutil,
    match,
    mdiff,
    obsutil,
    parser,
    patch,
    phases,
    pycompat,
    scmutil,
    smartset,
    tags,
    templatefilters,
    templateutil,
    url as urlmod,
    util,
)
from mercurial.utils import (
    procutil,
    stringutil,
)
from . import show


# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# developer config: phabricator.batchsize
eh.configitem(
    b'phabricator', b'batchsize', default=12,
)
eh.configitem(
    b'phabricator', b'callsign', default=None,
)
eh.configitem(
    b'phabricator', b'curlcmd', default=None,
)
# developer config: phabricator.repophid
eh.configitem(
    b'phabricator', b'repophid', default=None,
)
eh.configitem(
    b'phabricator', b'url', default=None,
)
eh.configitem(
    b'phabsend', b'confirm', default=False,
)
eh.configitem(
    b'phabimport', b'secret', default=False,
)
+eh.configitem(
+    b'phabimport', b'obsolete', default=False,
+)

colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]


@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
    """Load ``.arcconfig`` content into a ui instance on repository open."""
    result = False
    arcconfig = {}

    try:
        # json.loads only accepts bytes from 3.6+
        rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        # json.loads only returns unicode strings
        arcconfig = pycompat.rapply(
            lambda x: encoding.unitolocal(x)
            if isinstance(x, pycompat.unicode)
            else x,
            pycompat.json_loads(rawparams),
        )

        result = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        pass

    cfg = util.sortdict()

    if b"repository.callsign" in arcconfig:
        cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]

    if b"phabricator.uri" in arcconfig:
        cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]

    if cfg:
        ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))

    return orig(ui, wdirvfs, hgvfs, requirements) or result  # Load .hg/hgrc
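
Concretely, the wrapper above maps two ``.arcconfig`` keys onto Mercurial
config. A minimal ``.arcconfig`` at the repository root that it would pick up
might look like this (the callsign and URL are placeholders):

    {
        "phabricator.uri": "https://phab.example.com/",
        "repository.callsign": "FOO"
    }
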


def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = pycompat.fsdecode(kwargs.pop('test_vcr', None))
            if cassette:
                import hgdemandimport

                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)

        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate


def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()

    def process(prefix, obj):
        if isinstance(obj, bool):
            obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
        lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
        items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
        if items is None:
            flatparams[prefix] = obj
        else:
            for k, v in items(obj):
                if prefix:
                    process(b'%s[%s]' % (prefix, k), v)
                else:
                    process(k, v)

    process(b'', params)
    return util.urlreq.urlencode(flatparams)
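
As a standalone illustration of that flattening rule (a sketch using plain
``str`` keys, not part of the module):

def _flatten_demo(params, prefix=''):
    # Recursively turn nested dicts/lists into PHP-style bracketed keys,
    # mirroring what urlencodenested() does before calling urlencode.
    flat = {}
    pairs = params.items() if isinstance(params, dict) else enumerate(params)
    for k, v in pairs:
        key = '%s[%s]' % (prefix, k) if prefix else str(k)
        if isinstance(v, (dict, list)):
            flat.update(_flatten_demo(v, key))
        else:
            flat[key] = v
    return flat

# _flatten_demo({'a': ['b', 'c'], 'd': {'e': 'f'}})
# -> {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'}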


def readurltoken(ui):
    """Return the conduit URL and token, and make sure both exist.

    Currently read from the [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(
            _(b'config %s.%s is required') % (b'phabricator', b'url')
        )

    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token


def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(ui)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    ui.debug(b'Conduit Response: %s\n' % body)
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
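
The request built here is ordinary form data: the JSON-encoded arguments,
with the token tucked into ``__conduit__``, go in a ``params`` field
alongside ``output=json`` and ``__conduit__=1``. A rough standard-library
sketch of the same exchange (host and token are placeholders, and error
handling is simplified):

import json
import urllib.parse
import urllib.request

def conduit_demo(host, token, method, params):
    # Same wire format as callconduit(): form-encoded POST to /api/<method>.
    payload = dict(params, __conduit__={'token': token})
    body = urllib.parse.urlencode({
        'params': json.dumps(payload),
        'output': 'json',
        '__conduit__': 1,
    }).encode('ascii')
    req = urllib.request.Request('%s/api/%s' % (host, method), data=body)
    with urllib.request.urlopen(req) as rsp:
        result = json.loads(rsp.read())
    if result.get('error_code'):
        raise RuntimeError(result['error_info'])
    return result['result']

# conduit_demo('https://phab.example.com', 'cli-xxxx', 'conduit.ping', {})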


@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    # json.loads only returns unicode strings
    params = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        pycompat.json_loads(rawparams),
    )
    # json.dumps only accepts unicode strings
    result = pycompat.rapply(
        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
        callconduit(ui, name, params),
    )
    s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
    ui.write(b'%s\n' % encoding.unitolocal(s))
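
For example, assuming the URL and token above are configured, piping an empty
JSON object to this command should ping the server:
``echo '{}' | hg debugcallconduit conduit.ping``.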


def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    # developer config: phabricator.repophid
    repophid = repo.ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = repo.ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(
        repo.ui,
        b'diffusion.repository.search',
        {b'constraints': {b'callsigns': [callsign]}},
    )
    if len(query[b'data']) == 0:
        return None
    repophid = query[b'data'][0][b'phid']
    repo.ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid


_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
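
To make the two patterns concrete, a small self-contained sketch (not part of
the module): the first matches local tags such as ``D1234``; the second
matches the ``Differential Revision:`` trailer in commit messages, capturing
the URL and the numeric ID:

import re

_tag = re.compile(br'\AD([1-9][0-9]*)\Z')
_desc = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)

msg = b'fix a bug\n\nDifferential Revision: https://phab.example.com/D1234\n'
assert _tag.match(b'D1234').group(1) == b'1234'
assert _desc.search(msg).group('id') == b'1234'
assert _desc.search(msg).group('url') == b'https://phab.example.com/D1234'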


def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that have been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If no such commit-message line is found, examines all precursors and their
    tags. Tags with a format like "D1234" are considered a match, and the node
    with that tag and the number after "D" (e.g. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of the
    corresponding Differential Revision, and to exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )
        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # "precursors" as known by Phabricator
            phprecset = {getnode(d) for d in diffs}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnode = getnode(lastdiff)
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result


def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to its Differential Revision
    ID, or None.
    """
    result = {}
    for rev in revs:
        result[rev] = None
        ctx = repo[rev]
        # Check commit message
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            result[rev] = int(m.group('id'))
            continue
        # Check tags
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                result[rev] = int(m.group(1))
                break

    return result


def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    output = util.stringio()
    for chunk, _label in patch.diffui(
        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
    ):
        output.write(chunk)
    return output.getvalue()


class DiffChangeType(object):
    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8


class DiffFileType(object):
    TEXT = 1
    IMAGE = 2
    BINARY = 3


@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required


@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines


@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
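
Taken together, the three classes above mirror the nested payload sent to
Phabricator when a diff is created over Conduit: a ``phabdiff`` owns per-file
``phabchange`` objects, each of which owns its ``phabhunk`` objects, and the
camelCase attribute names are kept because the API requires them (hence the
"camelcase-required" comments).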


def maketext(pchange, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, ctx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        corpus = b''.join(lines[1:])
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )


def uploadchunks(fctx, fphid):
692 | """upload large binary files as separate chunks. |
|
695 | """upload large binary files as separate chunks. | |
693 | Phab requests chunking over 8MiB, and splits into 4MiB chunks |
|
696 | Phab requests chunking over 8MiB, and splits into 4MiB chunks | |
694 | """ |
|
697 | """ | |
695 | ui = fctx.repo().ui |
|
698 | ui = fctx.repo().ui | |
696 | chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid}) |
|
699 | chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid}) | |
697 | with ui.makeprogress( |
|
700 | with ui.makeprogress( | |
698 | _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks) |
|
701 | _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks) | |
699 | ) as progress: |
|
702 | ) as progress: | |
700 | for chunk in chunks: |
|
703 | for chunk in chunks: | |
701 | progress.increment() |
|
704 | progress.increment() | |
702 | if chunk[b'complete']: |
|
705 | if chunk[b'complete']: | |
703 | continue |
|
706 | continue | |
704 | bstart = int(chunk[b'byteStart']) |
|
707 | bstart = int(chunk[b'byteStart']) | |
705 | bend = int(chunk[b'byteEnd']) |
|
708 | bend = int(chunk[b'byteEnd']) | |
706 | callconduit( |
|
709 | callconduit( | |
707 | ui, |
|
710 | ui, | |
708 | b'file.uploadchunk', |
|
711 | b'file.uploadchunk', | |
709 | { |
|
712 | { | |
710 | b'filePHID': fphid, |
|
713 | b'filePHID': fphid, | |
711 | b'byteStart': bstart, |
|
714 | b'byteStart': bstart, | |
712 | b'data': base64.b64encode(fctx.data()[bstart:bend]), |
|
715 | b'data': base64.b64encode(fctx.data()[bstart:bend]), | |
713 | b'dataEncoding': b'base64', |
|
716 | b'dataEncoding': b'base64', | |
714 | }, |
|
717 | }, | |
715 | ) |
|
718 | ) | |
716 |
|
719 | |||
717 |
|
720 | |||
718 | def uploadfile(fctx): |
|
721 | def uploadfile(fctx): | |
719 | """upload binary files to Phabricator""" |
|
722 | """upload binary files to Phabricator""" | |
720 | repo = fctx.repo() |
|
723 | repo = fctx.repo() | |
721 | ui = repo.ui |
|
724 | ui = repo.ui | |
722 | fname = fctx.path() |
|
725 | fname = fctx.path() | |
723 | size = fctx.size() |
|
726 | size = fctx.size() | |
724 | fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest()) |
|
727 | fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest()) | |
725 |
|
728 | |||
726 | # an allocate call is required first to see if an upload is even required |
|
729 | # an allocate call is required first to see if an upload is even required | |
727 | # (Phab might already have it) and to determine if chunking is needed |
|
730 | # (Phab might already have it) and to determine if chunking is needed | |
728 | allocateparams = { |
|
731 | allocateparams = { | |
729 | b'name': fname, |
|
732 | b'name': fname, | |
730 | b'contentLength': size, |
|
733 | b'contentLength': size, | |
731 | b'contentHash': fhash, |
|
734 | b'contentHash': fhash, | |
732 | } |
|
735 | } | |
733 | filealloc = callconduit(ui, b'file.allocate', allocateparams) |
|
736 | filealloc = callconduit(ui, b'file.allocate', allocateparams) | |
734 | fphid = filealloc[b'filePHID'] |
|
737 | fphid = filealloc[b'filePHID'] | |
735 |
|
738 | |||
736 | if filealloc[b'upload']: |
|
739 | if filealloc[b'upload']: | |
737 | ui.write(_(b'uploading %s\n') % bytes(fctx)) |
|
740 | ui.write(_(b'uploading %s\n') % bytes(fctx)) | |
738 | if not fphid: |
|
741 | if not fphid: | |
739 | uploadparams = { |
|
742 | uploadparams = { | |
740 | b'name': fname, |
|
743 | b'name': fname, | |
741 | b'data_base64': base64.b64encode(fctx.data()), |
|
744 | b'data_base64': base64.b64encode(fctx.data()), | |
742 | } |
|
745 | } | |
743 | fphid = callconduit(ui, b'file.upload', uploadparams) |
|
746 | fphid = callconduit(ui, b'file.upload', uploadparams) | |
744 | else: |
|
747 | else: | |
745 | uploadchunks(fctx, fphid) |
|
748 | uploadchunks(fctx, fphid) | |
746 | else: |
|
749 | else: | |
747 | ui.debug(b'server already has %s\n' % bytes(fctx)) |
|
750 | ui.debug(b'server already has %s\n' % bytes(fctx)) | |
748 |
|
751 | |||
749 | if not fphid: |
|
752 | if not fphid: | |
750 | raise error.Abort(b'Upload of %s failed.' % bytes(fctx)) |
|
753 | raise error.Abort(b'Upload of %s failed.' % bytes(fctx)) | |
751 |
|
754 | |||
752 | return fphid |
|
755 | return fphid | |
753 |
|
756 | |||
754 |
|
757 | |||
755 | def addoldbinary(pchange, oldfctx, fctx): |
|
758 | def addoldbinary(pchange, oldfctx, fctx): | |
756 | """add the metadata for the previous version of a binary file to the |
|
759 | """add the metadata for the previous version of a binary file to the | |
757 | phabchange for the new version |
|
760 | phabchange for the new version | |
758 |
|
761 | |||
759 | ``oldfctx`` is the previous version of the file; ``fctx`` is the new |
|
762 | ``oldfctx`` is the previous version of the file; ``fctx`` is the new | |
760 | version of the file, or None if the file is being removed. |
|
763 | version of the file, or None if the file is being removed. | |
761 | """ |
|
764 | """ | |
762 | if not fctx or fctx.cmp(oldfctx): |
|
765 | if not fctx or fctx.cmp(oldfctx): | |
763 | # Files differ, add the old one |
|
766 | # Files differ, add the old one | |
764 | pchange.metadata[b'old:file:size'] = oldfctx.size() |
|
767 | pchange.metadata[b'old:file:size'] = oldfctx.size() | |
765 | mimeguess, _enc = mimetypes.guess_type( |
|
768 | mimeguess, _enc = mimetypes.guess_type( | |
766 | encoding.unifromlocal(oldfctx.path()) |
|
769 | encoding.unifromlocal(oldfctx.path()) | |
767 | ) |
|
770 | ) | |
768 | if mimeguess: |
|
771 | if mimeguess: | |
769 | pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr( |
|
772 | pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr( | |
770 | mimeguess |
|
773 | mimeguess | |
771 | ) |
|
774 | ) | |
772 | fphid = uploadfile(oldfctx) |
|
775 | fphid = uploadfile(oldfctx) | |
773 | pchange.metadata[b'old:binary-phid'] = fphid |
|
776 | pchange.metadata[b'old:binary-phid'] = fphid | |
774 | else: |
|
777 | else: | |
775 | # If it's left as IMAGE/BINARY web UI might try to display it |
|
778 | # If it's left as IMAGE/BINARY web UI might try to display it | |
776 | pchange.fileType = DiffFileType.TEXT |
|
779 | pchange.fileType = DiffFileType.TEXT | |
777 | pchange.copynewmetadatatoold() |
|
780 | pchange.copynewmetadatatoold() | |


def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE


# Copied from mercurial/patch.py
gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}


def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    try:
        fctx.data().decode('utf-8')
        return False
    except UnicodeDecodeError:
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
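

# Illustrative sketch (not part of the original module): notutf8() only flags
# a file when decoding raises UnicodeDecodeError, so a latin-1 encoded file is
# marked binary while valid UTF-8 passes through, e.g.:
#
#   fctx.data() == b'caf\xe9'     -> notutf8(fctx) is True (marked as binary)
#   fctx.data() == b'caf\xc3\xa9' -> notutf8(fctx) is False (valid UTF-8)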


def addremoved(pdiff, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        pchange.addoldmode(gitmode[ctx.p1()[fname].flags()])
        oldfctx = ctx.p1()[fname]
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addmodified(pdiff, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = fctx.p1()
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[ctx[fname].flags()]
        originalmode = gitmode[ctx.p1()[fname].flags()]
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, fctx.p1(), fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)


def addadded(pdiff, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves"""
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}
    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[ctx[fname].flags()]
        renamed = fctx.renamed()

        if renamed:
            originalfname = renamed[0]
            oldfctx = ctx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, ctx, fname)

        pdiff.addchange(pchange)

    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
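

# Illustrative sketch (not part of the original module): if ``a`` is moved to
# ``b`` and also copied to ``c`` in the same changeset (assuming ``b`` is
# visited before ``c``), the loop above first records ``a`` as MOVE_AWAY with
# awayPaths=[b] and ``b`` as MOVE_HERE; when ``c`` is processed, ``a`` is
# found in movedchanges, so its type is upgraded to MULTICOPY and ``c``
# becomes COPY_HERE.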


def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % ctx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = ctx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, ctx, added, removed)
    addmodified(pdiff, ctx, modified)
    addremoved(pdiff, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
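

# Illustrative note (not part of the original module), with hypothetical
# values: the dict returned by "differential.creatediff" carries the b'phid'
# key used by createdifferentialrevision() and a b'diffid' (or b'id') key
# used by writediffproperties(), e.g.:
#
#   diff = creatediff(ctx)
#   diff[b'phid']    # -> b'PHID-DIFF-...'
#   diff[b'diffid']  # -> id passed as b'diff_id' to setdiffproperty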


def writediffproperties(ctx, diff):
    """write metadata to diff so patches can be applied losslessly"""
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': ctx.user(),
                b'date': b'%d %d' % ctx.date(),
                b'branch': ctx.branch(),
                b'node': ctx.hex(),
                b'parent': ctx.p1().hex(),
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)

    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(
            {
                ctx.hex(): {
                    b'author': stringutil.person(ctx.user()),
                    b'authorEmail': stringutil.email(ctx.user()),
                    b'time': int(ctx.date()[0]),
                    b'commit': ctx.hex(),
                    b'parents': [ctx.p1().hex()],
                    b'branch': ctx.branch(),
                },
            }
        ),
    }
    callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)


def createdifferentialrevision(
    ctx,
    revid=None,
    parentrevphid=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change, we might still need to update its metadata so
        # pushers can know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(
        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
    )
    for k, v in info[b'fields'].items():
        if k in [b'title', b'summary', b'testPlan']:
            transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
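

# Illustrative sketch (not part of the original module): a first-time send of
# a changeset with no parent revision reduces to the hypothetical call
#
#   revision, diff = createdifferentialrevision(ctx)
#
# which creates a new diff, then issues "differential.revision.edit" with a
# b'update' transaction plus whatever b'title'/b'summary'/b'testPlan' fields
# were parsed out of the commit message.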


def userphids(ui, names):
    """convert user names to PHIDs"""
    names = [name.lower() for name in names]
    query = {b'constraints': {b'usernames': names}}
    result = callconduit(ui, b'user.search', query)
    # A username that is not found is not an error of the API, so check
    # whether we have missed some names here.
    data = result[b'data']
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    unresolved = set(names) - resolved
    if unresolved:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
        )
    return [entry[b'phid'] for entry in data]
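

# Illustrative sketch (not part of the original module), with hypothetical
# usernames:
#
#   userphids(ui, [b'Alice', b'bob'])
#   # -> [b'PHID-USER-...', b'PHID-USER-...'] on success; raises
#   # error.Abort(b'unknown username: ...') if any name cannot be resolved.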


@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be sent as a stack
    with a linear dependency relationship using the order specified by the
    revset.

    The first time changesets are uploaded, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, and remove related tags. This is similar
    to what arcanist will do, and is more desirable in author-push workflows.
    Otherwise, use local tags to record the ``Differential Revision``
    association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add the following to your configuration file to make it the
    default behaviour::

        [phabsend]
        confirm = true

    phabsend will check obsstore and the above association to decide whether
    to update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    # {newnode: (oldnode, olddiff, olddrev)}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs
    # and can provide dependency relationships
    lastrevphid = None
    for rev in revs:
        ui.debug(b'sending rev %d\n' % rev)
        ctx = repo[rev]

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctx,
                revid,
                lastrevphid,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )
            diffmap[ctx.node()] = diff
            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            m = _differentialrevisiondescre.search(ctx.description())
            if not m or int(m.group('id')) != newrevid:
                tagname = b'D%d' % newrevid
                tags.tag(
                    repo,
                    tagname,
                    ctx.node(),
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        actiondesc = ui.label(
            {
                b'created': _(b'created'),
                b'skipped': _(b'skipped'),
                b'updated': _(b'updated'),
            }[action],
            b'phabricator.action.%s' % action,
        )
        drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
        ui.write(
            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
        )
        drevids.append(newrevid)
        lastrevphid = newrevphid

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            for i, rev in enumerate(revs):
                old = unfi[rev]
                drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]
                newdesc = getdescfromdrev(drev)
                # Make sure commit message contains "Differential Revision"
                if old.description() != newdesc:
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]
                    # Update diff property
                    # If it fails just warn and keep going, otherwise the DREV
                    # associations will be lost
                    try:
                        writediffproperties(unfi[newnode], diffmap[old.node()])
                    except util.urlerr.urlerror:
                        ui.warnnoi18n(
                            b'Failed to update metadata for D%d\n' % drevid
                        )
                # Remove local tags since they are no longer necessary
                tagname = b'D%d' % drevid
                if tagname in repo.tags():
                    tags.tag(
                        repo,
                        tagname,
                        nullid,
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
            scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
            if wnode in mapping:
                unfi.setparents(mapping[wnode][0])
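

# Illustrative usage (not part of the original module), with hypothetical
# revision numbers and usernames:
#
#   hg phabsend -r 123::126 --reviewer alice --blocker bob --confirm
#
# sends the changesets bottom-up as a dependent stack, prompting before
# anything is uploaded.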


# Map from "hg:meta" keys to the headers understood by "hg import". The order
# is consistent with "hg export" output.
_metanamemap = util.sortdict(
    [
        (b'user', b'User'),
        (b'date', b'Date'),
        (b'branch', b'Branch'),
        (b'node', b'Node ID'),
        (b'parent', b'Parent '),
    ]
)


def _confirmbeforesend(repo, revs, oldmap):
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        if drevid:
            drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
        else:
            drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')

        ui.write(
            _(b'%s - %s: %s\n')
            % (
                drevdesc,
                ui.label(bytes(ctx), b'phabricator.node'),
                ui.label(desc, b'phabricator.desc'),
            )
        )

    if ui.promptchoice(
        _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
    ):
        return False

    return True


_knownstatusnames = {
    b'accepted',
    b'needsreview',
    b'needsrevision',
    b'closed',
    b'abandoned',
    b'changesplanned',
}


def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    return drev[b'statusName'].replace(b' ', b'').lower()
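

# For example, a drev with b'statusName': b'Needs Review' normalizes to
# b'needsreview', matching the _knownstatusnames entry above.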


# Small language to specify differential revisions. Supported symbols: (),
# :X, +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}


def _tokenize(text):
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
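

# For example, _tokenize(b':D2+D3') yields, in order:
#
#   (b':', None, 0), (b'symbol', b'D2', 1), (b'+', None, 3),
#   (b'symbol', b'D3', 4), (b'end', None, 6)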


def _parse(text):
    tree, pos = parser.parser(_elements).parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
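

# Illustrative sketch (not part of the original module): since b':' binds
# tighter than b'+' in _elements above, b':D2+D3' should parse to a tree
# shaped like
#
#   (b'add', (b'ancestors', (b'symbol', b'D2')), (b'symbol', b'D3'))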


def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    if symbol.startswith(b'D') and symbol[1:].isdigit():
        return int(symbol[1:])
    if symbol.isdigit():
        return int(symbol)


def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    drevs = set()
    ancestordrevs = set()
    op = tree[0]
    if op == b'symbol':
        r = _parsedrev(tree[1])
        if r:
            drevs.add(r)
    elif op == b'ancestors':
        r, a = _prefetchdrevs(tree[1])
        drevs.update(r)
        ancestordrevs.update(r)
        ancestordrevs.update(a)
    else:
        for t in tree[1:]:
            r, a = _prefetchdrevs(t)
            drevs.update(r)
            ancestordrevs.update(a)
    return drevs, ancestordrevs
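

# Illustrative sketch (not part of the original module): for the tree of
# b':D4+D7' the walk above should return ({4, 7}, {4}) -- D4 and D7 are
# fetched individually, and D4 additionally needs its ancestor stack.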


def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
                "phabricator:depends-on": [
                    "PHID-DREV-gbapp366kutjebt7agcd"
                ],
                "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
                "3",
                "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null,
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
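

# Illustrative usage (not part of the original module), with hypothetical
# revision numbers:
#
#   querydrev(ui, b':D4+needsreview')
#
# should return the drev dicts for D4's stack plus any revision in the
# prefetched set whose normalized status is b'needsreview'.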


def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to the differential.getcommitmessage API, but we only
    care about a limited set of fields: title, summary, test plan, and URL.
    """
    title = drev[b'title']
    summary = drev[b'summary'].rstrip()
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        testplan = b'Test Plan:\n%s' % testplan
    uri = b'Differential Revision: %s' % drev[b'uri']
    return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
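

# For example, a drev with title b'example', empty summary and test plan, and
# uri b'https://phab.example.com/D2' yields:
#
#   b'example\n\nDifferential Revision: https://phab.example.com/D2'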
1561 |
|
1564 | |||
1562 |
|
1565 | |||

def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com",
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """
    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        if props.get(b'local:commits'):
            commit = sorted(props[b'local:commits'].values())[0]
            meta = {}
            if b'author' in commit and b'authorEmail' in commit:
                meta[b'user'] = b'%s <%s>' % (
                    commit[b'author'],
                    commit[b'authorEmail'],
                )
            if b'time' in commit:
                meta[b'date'] = b'%d 0' % int(commit[b'time'])
            if b'branch' in commit:
                meta[b'branch'] = commit[b'branch']
            node = commit.get(b'commit', commit.get(b'rev'))
            if node:
                meta[b'node'] = node
            if len(commit.get(b'parents', ())) >= 1:
                meta[b'parent'] = commit[b'parents'][0]
        else:
            meta = {}
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
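
# Quick illustration (not part of the extension): an "arc"-style diff object
# takes the local:commits branch above; the fake hashes are placeholders.
_demo_diff = {
    b'properties': {
        b'local:commits': {
            b'aaaa': {
                b'author': b'Foo Bar',
                b'authorEmail': b'foo@example.com',
                b'commit': b'aaaa',
                b'parents': [b'bbbb'],
                b'time': 1499546314,
            }
        }
    },
}
_demo_meta = getdiffmeta(_demo_diff)
assert _demo_meta[b'user'] == b'Foo Bar <foo@example.com>'
assert _demo_meta[b'date'] == b'1499546314 0'  # zone forced to 0 (UTC)
assert _demo_meta[b'node'] == b'aaaa'
assert _demo_meta[b'parent'] == b'bbbb'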

def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns: the results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
        desc = getdescfromdrev(drev)
        header = b'# HG changeset patch\n'

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        meta = getdiffmeta(diffs[b'%d' % diffid])
        for k in _metanamemap.keys():
            if k in meta:
                header += b'# %s %s\n' % (_metanamemap[k], meta[k])

        content = b'%s%s\n%s' % (header, desc, body)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
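
# Sketch of the header assembly above (assumption: _metanamemap maps hg:meta
# keys such as b'user', b'date' and b'node' to the "# User"/"# Date"/
# "# Node ID" patch headers understood by `hg import`):
_demo_header = b'# HG changeset patch\n'
for _name, _value in [
    (b'User', b'Foo Bar <foo@example.com>'),
    (b'Date', b'1499571514 25200'),
    (b'Node ID', b'98c08acae292b2faf60a279b4189beb6cff1414d'),
]:
    _demo_header += b'# %s %s\n' % (_name, _value)
assert _demo_header.startswith(b'# HG changeset patch\n# User Foo Bar')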

@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, spec, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reasons, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8, and
    excludes D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions
    in a stack up to D9.

    If --stack is given, follow dependency information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(ui, spec)

    def _write(patches):
        for drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _write)
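
# Typical usage (shell; assumes the extension is enabled and configured):
#
#   $ hg phabread D123 > fix.patch       # one revision as an importable patch
#   $ hg phabread :D123 | hg import -    # the whole stack up to D123
#
# --stack is shorthand for wrapping the spec in the ``:`` operator.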

@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, spec, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                if not node:
                    raise error.Abort(_(b'D%s: no diffs found') % drev)

                ui.note(msg + b'\n')
                parents = [repo[node]]

    # opts was already converted by byteskwargs() above; converting it again
    # would mangle the (now bytes) keys, so use it directly.
    if opts.get(b'stack'):
        spec = b':(%s)' % spec
    drevs = querydrev(repo.ui, spec)

    readpatch(repo.ui, drevs, _write)
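
# The two config knobs read above can be set in hgrc (a sketch; the effect of
# 'obsolete' assumes the evolve extension is loaded):
#
#   [phabimport]
#   secret = true      # commit the imported changesets in the secret phase
#   obsolete = true    # let evolve mark the local predecessors obsolete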

@vcrcommand(
    b'phabupdate',
    [
        (b'', b'accept', False, _(b'accept revisions')),
        (b'', b'reject', False, _(b'reject revisions')),
        (b'', b'abandon', False, _(b'abandon revisions')),
        (b'', b'reclaim', False, _(b'reclaim revisions')),
        (b'm', b'comment', b'', _(b'comment on the last revision')),
    ],
    _(b'DREVSPEC [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabupdate(ui, repo, spec, **opts):
    """update Differential Revisions in batch

    DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
    """
    opts = pycompat.byteskwargs(opts)
    flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
    if len(flags) > 1:
        raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))

    actions = []
    for f in flags:
        actions.append({b'type': f, b'value': True})

    drevs = querydrev(ui, spec)
    for i, drev in enumerate(drevs):
        if i + 1 == len(drevs) and opts.get(b'comment'):
            actions.append({b'type': b'comment', b'value': opts[b'comment']})
        if actions:
            params = {
                b'objectIdentifier': drev[b'phid'],
                b'transactions': actions,
            }
            callconduit(ui, b'differential.revision.edit', params)
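
# Illustration (not part of the extension): `hg phabupdate --accept -m LGTM
# :D2` makes the loop above send differential.revision.edit a payload shaped
# like this for the last revision of the stack (the PHID is hypothetical):
_demo_params = {
    b'objectIdentifier': b'PHID-DREV-xxxx',
    b'transactions': [
        {b'type': b'accept', b'value': True},
        {b'type': b'comment', b'value': b'LGTM'},
    ],
}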

@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        return templateutil.hybriddict(
            {b'url': m.group('url'), b'id': b"D%s" % m.group('id')}
        )
    else:
        tags = ctx.repo().nodetags(ctx.node())
        for t in tags:
            if _differentialrevisiontagre.match(t):
                url = ctx.repo().ui.config(b'phabricator', b'url')
                if not url.endswith(b'/'):
                    url += b'/'
                url += t

                return templateutil.hybriddict({b'url': url, b'id': t})
    return None
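
# Example template usage (shell; the URL shape follows the config example in
# the module docstring):
#
#   $ hg log -r . -T '{phabreview.id} {phabreview.url}\n'
#   D123 https://phab.example.com/D123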

@eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
def template_status(context, mapping):
    """:phabstatus: String. Status of Phabricator differential."""
    ctx = context.resource(mapping, b'ctx')
    repo = context.resource(mapping, b'repo')
    ui = context.resource(mapping, b'ui')

    rev = ctx.rev()
    try:
        drevid = getdrevmap(repo, [rev])[rev]
    except KeyError:
        return None
    drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
    for drev in drevs:
        if int(drev[b'id']) == drevid:
            return templateutil.hybriddict(
                {b'url': drev[b'uri'], b'status': drev[b'statusName']}
            )
    return None
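
# Example template usage (shell; the status string is whatever Phabricator
# reports as statusName):
#
#   $ hg log -r . -T '{phabstatus.status}\n'
#   Needs Review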

@show.showview(b'phabstatus', csettopic=b'work')
def phabstatusshowview(ui, repo, displayer):
    """Phabricator differential status"""
    revs = repo.revs('sort(_underway(), topo)')
    drevmap = getdrevmap(repo, revs)
    unknownrevs, drevids, revsbydrevid = [], set(), {}
    for rev, drevid in pycompat.iteritems(drevmap):
        if drevid is not None:
            drevids.add(drevid)
            revsbydrevid.setdefault(drevid, set()).add(rev)
        else:
            unknownrevs.append(rev)

    drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
    drevsbyrev = {}
    for drev in drevs:
        for rev in revsbydrevid[int(drev[b'id'])]:
            drevsbyrev[rev] = drev

    def phabstatus(ctx):
        drev = drevsbyrev[ctx.rev()]
        status = ui.label(
            b'%(statusName)s' % drev,
            b'phabricator.status.%s' % _getstatusname(drev),
        )
        ui.write(b"\n%s %s\n" % (drev[b'uri'], status))

    revs -= smartset.baseset(unknownrevs)
    revdag = graphmod.dagwalker(repo, revs)

    ui.setconfig(b'experimental', b'graphshorten', True)
    displayer._exthook = phabstatus
    nodelen = show.longestshortest(repo, revs)
    logcmdutil.displaygraph(
        ui,
        repo,
        revdag,
        displayer,
        graphmod.asciiedges,
        props={b'nodelen': nodelen},
    )
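
# The view plugs into the show extension; a typical invocation (assumed):
#
#   $ hg show phabstatus
#
# draws the graph of unfinished changesets with each one's Differential URL
# and status appended by the _exthook above.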