phabupdate: allow revisions to be marked with "plan changes"...
Matt Harbison
r45695:225588c4 default
@@ -0,0 +1,141 @@
1 {
2 "version": 1,
3 "interactions": [
4 {
5 "response": {
6 "headers": {
7 "x-xss-protection": [
8 "1; mode=block"
9 ],
10 "expires": [
11 "Sat, 01 Jan 2000 00:00:00 GMT"
12 ],
13 "server": [
14 "Apache/2.4.10 (Debian)"
15 ],
16 "date": [
17 "Wed, 15 Jul 2020 17:23:27 GMT"
18 ],
19 "cache-control": [
20 "no-store"
21 ],
22 "content-type": [
23 "application/json"
24 ],
25 "transfer-encoding": [
26 "chunked"
27 ],
28 "strict-transport-security": [
29 "max-age=0; includeSubdomains; preload"
30 ],
31 "x-frame-options": [
32 "Deny"
33 ],
34 "referrer-policy": [
35 "no-referrer"
36 ],
37 "x-content-type-options": [
38 "nosniff"
39 ]
40 },
41 "body": {
42 "string": "{\"result\":[{\"id\":\"6876\",\"phid\":\"PHID-DREV-looitrxgt3omaau7a7qk\",\"title\":\"phabricator: support automatically obsoleting old revisions of pulled commits\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D6876\",\"dateCreated\":\"1569388644\",\"dateModified\":\"1579887103\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"1\",\"statusName\":\"Needs Revision\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":68,\"lines.removed\":1},\"branch\":null,\"summary\":\"This is basically an import of the `pullcreatemarkers` extension[1] from the FB\\nrepo, with minor adjustments to `getmatchingdiff()` to work with modern hg.\\nSince this is very phabricator specific, it makes more sense to me to bundle it\\ninto the existing extension. It wasn't very obvious from the old name what\\nfunctionality was provided, and it may make sense to do this in other scenarios\\nbesides `hg pull`.\\n\\nThere are two use cases that I can see- first, ensuring that old revisions are\\ncleaned up for a contributor (I seem to recall something I submitted recently\\nneeded to be explicitly pruned, though most submissions do clean up\\nautomatically). Second, any `hg phabread | hg import -` would otherwise need to\\nbe manually cleaned up. The latter is annoying enough that I tend not to grab\\nthe code and try it when reviewing.\\n\\nIt is currently guarded by a config option (off by default), because @marmoute\\nexpressed concerns about duplicate marker creation if the pushing reviewer also\\ncreates a marker. I don't think that's possible here, since the obsolete\\nrevisions are explicitly excluded. But maybe there are other reasons someone\\nwouldn't want older revisions obsoleted. The config name reflects the fact that\\nI'm not sure if other things like import should get this too.\\n\\nI suspect that we could wrap a function deeper in the pull sequence to improve\\nboth the code and the UX. For example, when pulling an obsolete marker, it can\\nprint out a warning that the working directory parent is obsolete, but that\\ndoesn't happen here. (It won't happen with this test. It *should* without the\\n`--bypass` option, but doesn't.) It should also be possible to not have to\\nquery the range of new revisions, and maybe it can be added to the existing\\ntransaction.\\n\\n[1] https:\\/\\/bitbucket.org\\/facebook\\/hg-experimental\\/src\\/default\\/hgext3rd\\/pullcreatemarkers.py\",\"testPlan\":\"\",\"lineCount\":\"69\",\"activeDiffPHID\":\"PHID-DIFF-jdpqpzciqcooaxf2kojh\",\"diffs\":[\"16604\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\",\"PHID-USER-cah4b6i3kszy6debh3bl\":\"PHID-USER-cah4b6i3kszy6debh3bl\"},\"ccs\":[\"PHID-USER-34jnztnonbr4lhwuybwl\",\"PHID-USER-e66t6wbudjtigdnqbl3e\",\"PHID-USER-5iy6mkoveguhm2zthvww\",\"PHID-USER-q42dn7cc3donqriafhjx\",\"PHID-USER-vflsibccj4unqydwfvne\"],\"hashes\":[],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-2dbanvk64h5wguhxta2o\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":null}],\"error_code\":null,\"error_info\":null}"
43 },
44 "status": {
45 "message": "OK",
46 "code": 200
47 }
48 },
49 "request": {
50 "uri": "https://phab.mercurial-scm.org//api/differential.query",
51 "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B6876%5D%7D&__conduit__=1",
52 "method": "POST",
53 "headers": {
54 "content-length": [
55 "146"
56 ],
57 "accept": [
58 "application/mercurial-0.1"
59 ],
60 "content-type": [
61 "application/x-www-form-urlencoded"
62 ],
63 "user-agent": [
64 "mercurial/proto-1.0 (Mercurial 5.4.2+207-8403cc54bc83+20200709)"
65 ],
66 "host": [
67 "phab.mercurial-scm.org"
68 ]
69 }
70 }
71 },
72 {
73 "response": {
74 "headers": {
75 "x-xss-protection": [
76 "1; mode=block"
77 ],
78 "expires": [
79 "Sat, 01 Jan 2000 00:00:00 GMT"
80 ],
81 "server": [
82 "Apache/2.4.10 (Debian)"
83 ],
84 "date": [
85 "Wed, 15 Jul 2020 17:23:28 GMT"
86 ],
87 "cache-control": [
88 "no-store"
89 ],
90 "content-type": [
91 "application/json"
92 ],
93 "transfer-encoding": [
94 "chunked"
95 ],
96 "strict-transport-security": [
97 "max-age=0; includeSubdomains; preload"
98 ],
99 "x-frame-options": [
100 "Deny"
101 ],
102 "referrer-policy": [
103 "no-referrer"
104 ],
105 "x-content-type-options": [
106 "nosniff"
107 ]
108 },
109 "body": {
110 "string": "{\"result\":{\"object\":{\"id\":6876,\"phid\":\"PHID-DREV-looitrxgt3omaau7a7qk\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-g2xkgr2sjkrmhcr\"},{\"phid\":\"PHID-XACT-DREV-lgbrex6poz6x5pk\"}]},\"error_code\":null,\"error_info\":null}"
111 },
112 "status": {
113 "message": "OK",
114 "code": 200
115 }
116 },
117 "request": {
118 "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
119 "body": "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+%22PHID-DREV-looitrxgt3omaau7a7qk%22%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22plan-changes%22%2C+%22value%22%3A+true%7D%5D%7D&__conduit__=1",
120 "method": "POST",
121 "headers": {
122 "content-length": [
123 "278"
124 ],
125 "accept": [
126 "application/mercurial-0.1"
127 ],
128 "content-type": [
129 "application/x-www-form-urlencoded"
130 ],
131 "user-agent": [
132 "mercurial/proto-1.0 (Mercurial 5.4.2+207-8403cc54bc83+20200709)"
133 ],
134 "host": [
135 "phab.mercurial-scm.org"
136 ]
137 }
138 }
139 }
140 ]
141 }
\ No newline at end of file
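
The second interaction above is the new behaviour under test: a ``differential.revision.edit`` call that applies a "plan changes" transaction to D6876. A small Python sketch (not part of the cassette) that decodes the recorded form body, for readability:

    import json
    from urllib.parse import parse_qs

    body = (
        "output=json&params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+"
        "%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+"
        "%22PHID-DREV-looitrxgt3omaau7a7qk%22%2C+%22transactions%22%3A+"
        "%5B%7B%22type%22%3A+%22plan-changes%22%2C+%22value%22%3A+true%7D%5D%7D"
        "&__conduit__=1"
    )
    params = json.loads(parse_qs(body)["params"][0])
    # params == {
    #     "__conduit__": {"token": "cli-hahayouwish"},
    #     "objectIdentifier": "PHID-DREV-looitrxgt3omaau7a7qk",
    #     "transactions": [{"type": "plan-changes", "value": True}],
    # }
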
@@ -1,2285 +1,2293 @@
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 [auth]
38 [auth]
39 example.schemes = https
39 example.schemes = https
40 example.prefix = phab.example.com
40 example.prefix = phab.example.com
41
41
42 # API token. Get it from https://$HOST/conduit/login/
42 # API token. Get it from https://$HOST/conduit/login/
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
43 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
44 """
44 """
45
45
46 from __future__ import absolute_import
46 from __future__ import absolute_import
47
47
48 import base64
48 import base64
49 import contextlib
49 import contextlib
50 import hashlib
50 import hashlib
51 import itertools
51 import itertools
52 import json
52 import json
53 import mimetypes
53 import mimetypes
54 import operator
54 import operator
55 import re
55 import re
56
56
57 from mercurial.node import bin, nullid, short
57 from mercurial.node import bin, nullid, short
58 from mercurial.i18n import _
58 from mercurial.i18n import _
59 from mercurial.pycompat import getattr
59 from mercurial.pycompat import getattr
60 from mercurial.thirdparty import attr
60 from mercurial.thirdparty import attr
61 from mercurial import (
61 from mercurial import (
62 cmdutil,
62 cmdutil,
63 context,
63 context,
64 copies,
64 copies,
65 encoding,
65 encoding,
66 error,
66 error,
67 exthelper,
67 exthelper,
68 graphmod,
68 graphmod,
69 httpconnection as httpconnectionmod,
69 httpconnection as httpconnectionmod,
70 localrepo,
70 localrepo,
71 logcmdutil,
71 logcmdutil,
72 match,
72 match,
73 mdiff,
73 mdiff,
74 obsutil,
74 obsutil,
75 parser,
75 parser,
76 patch,
76 patch,
77 phases,
77 phases,
78 pycompat,
78 pycompat,
79 scmutil,
79 scmutil,
80 smartset,
80 smartset,
81 tags,
81 tags,
82 templatefilters,
82 templatefilters,
83 templateutil,
83 templateutil,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 )
86 )
87 from mercurial.utils import (
87 from mercurial.utils import (
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91 from . import show
91 from . import show
92
92
93
93
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
94 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
95 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
96 # be specifying the version(s) of Mercurial they are tested with, or
96 # be specifying the version(s) of Mercurial they are tested with, or
97 # leave the attribute unspecified.
97 # leave the attribute unspecified.
98 testedwith = b'ships-with-hg-core'
98 testedwith = b'ships-with-hg-core'
99
99
100 eh = exthelper.exthelper()
100 eh = exthelper.exthelper()
101
101
102 cmdtable = eh.cmdtable
102 cmdtable = eh.cmdtable
103 command = eh.command
103 command = eh.command
104 configtable = eh.configtable
104 configtable = eh.configtable
105 templatekeyword = eh.templatekeyword
105 templatekeyword = eh.templatekeyword
106 uisetup = eh.finaluisetup
106 uisetup = eh.finaluisetup
107
107
108 # developer config: phabricator.batchsize
108 # developer config: phabricator.batchsize
109 eh.configitem(
109 eh.configitem(
110 b'phabricator', b'batchsize', default=12,
110 b'phabricator', b'batchsize', default=12,
111 )
111 )
112 eh.configitem(
112 eh.configitem(
113 b'phabricator', b'callsign', default=None,
113 b'phabricator', b'callsign', default=None,
114 )
114 )
115 eh.configitem(
115 eh.configitem(
116 b'phabricator', b'curlcmd', default=None,
116 b'phabricator', b'curlcmd', default=None,
117 )
117 )
118 # developer config: phabricator.debug
118 # developer config: phabricator.debug
119 eh.configitem(
119 eh.configitem(
120 b'phabricator', b'debug', default=False,
120 b'phabricator', b'debug', default=False,
121 )
121 )
122 # developer config: phabricator.repophid
122 # developer config: phabricator.repophid
123 eh.configitem(
123 eh.configitem(
124 b'phabricator', b'repophid', default=None,
124 b'phabricator', b'repophid', default=None,
125 )
125 )
126 eh.configitem(
126 eh.configitem(
127 b'phabricator', b'url', default=None,
127 b'phabricator', b'url', default=None,
128 )
128 )
129 eh.configitem(
129 eh.configitem(
130 b'phabsend', b'confirm', default=False,
130 b'phabsend', b'confirm', default=False,
131 )
131 )
132 eh.configitem(
132 eh.configitem(
133 b'phabimport', b'secret', default=False,
133 b'phabimport', b'secret', default=False,
134 )
134 )
135 eh.configitem(
135 eh.configitem(
136 b'phabimport', b'obsolete', default=False,
136 b'phabimport', b'obsolete', default=False,
137 )
137 )
138
138
139 colortable = {
139 colortable = {
140 b'phabricator.action.created': b'green',
140 b'phabricator.action.created': b'green',
141 b'phabricator.action.skipped': b'magenta',
141 b'phabricator.action.skipped': b'magenta',
142 b'phabricator.action.updated': b'magenta',
142 b'phabricator.action.updated': b'magenta',
143 b'phabricator.desc': b'',
143 b'phabricator.desc': b'',
144 b'phabricator.drev': b'bold',
144 b'phabricator.drev': b'bold',
145 b'phabricator.node': b'',
145 b'phabricator.node': b'',
146 b'phabricator.status.abandoned': b'magenta dim',
146 b'phabricator.status.abandoned': b'magenta dim',
147 b'phabricator.status.accepted': b'green bold',
147 b'phabricator.status.accepted': b'green bold',
148 b'phabricator.status.closed': b'green',
148 b'phabricator.status.closed': b'green',
149 b'phabricator.status.needsreview': b'yellow',
149 b'phabricator.status.needsreview': b'yellow',
150 b'phabricator.status.needsrevision': b'red',
150 b'phabricator.status.needsrevision': b'red',
151 b'phabricator.status.changesplanned': b'red',
151 b'phabricator.status.changesplanned': b'red',
152 }
152 }
153
153
154 _VCR_FLAGS = [
154 _VCR_FLAGS = [
155 (
155 (
156 b'',
156 b'',
157 b'test-vcr',
157 b'test-vcr',
158 b'',
158 b'',
159 _(
159 _(
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
160 b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
161 b', otherwise will mock all http requests using the specified vcr file.'
161 b', otherwise will mock all http requests using the specified vcr file.'
162 b' (ADVANCED)'
162 b' (ADVANCED)'
163 ),
163 ),
164 ),
164 ),
165 ]
165 ]
166
166
167
167
168 @eh.wrapfunction(localrepo, "loadhgrc")
168 @eh.wrapfunction(localrepo, "loadhgrc")
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
169 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements):
170 """Load ``.arcconfig`` content into a ui instance on repository open.
170 """Load ``.arcconfig`` content into a ui instance on repository open.
171 """
171 """
172 result = False
172 result = False
173 arcconfig = {}
173 arcconfig = {}
174
174
175 try:
175 try:
176 # json.loads only accepts bytes from 3.6+
176 # json.loads only accepts bytes from 3.6+
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
177 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
178 # json.loads only returns unicode strings
178 # json.loads only returns unicode strings
179 arcconfig = pycompat.rapply(
179 arcconfig = pycompat.rapply(
180 lambda x: encoding.unitolocal(x)
180 lambda x: encoding.unitolocal(x)
181 if isinstance(x, pycompat.unicode)
181 if isinstance(x, pycompat.unicode)
182 else x,
182 else x,
183 pycompat.json_loads(rawparams),
183 pycompat.json_loads(rawparams),
184 )
184 )
185
185
186 result = True
186 result = True
187 except ValueError:
187 except ValueError:
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
188 ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
189 except IOError:
189 except IOError:
190 pass
190 pass
191
191
192 cfg = util.sortdict()
192 cfg = util.sortdict()
193
193
194 if b"repository.callsign" in arcconfig:
194 if b"repository.callsign" in arcconfig:
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
195 cfg[(b"phabricator", b"callsign")] = arcconfig[b"repository.callsign"]
196
196
197 if b"phabricator.uri" in arcconfig:
197 if b"phabricator.uri" in arcconfig:
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
198 cfg[(b"phabricator", b"url")] = arcconfig[b"phabricator.uri"]
199
199
200 if cfg:
200 if cfg:
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
201 ui.applyconfig(cfg, source=wdirvfs.join(b".arcconfig"))
202
202
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
203 return orig(ui, wdirvfs, hgvfs, requirements) or result # Load .hg/hgrc
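
For reference, a minimal sketch of the mapping ``_loadhgrc`` performs; the ``.arcconfig`` content here is an invented example, and only the two keys handled above are shown:

    import json

    arcconfig = json.loads(
        '{"repository.callsign": "FOO",'
        ' "phabricator.uri": "https://phab.example.com/"}'
    )
    # applied as if hgrc contained:
    #   [phabricator]
    #   callsign = FOO
    #   url = https://phab.example.com/
    cfg = {
        ("phabricator", "callsign"): arcconfig["repository.callsign"],
        ("phabricator", "url"): arcconfig["phabricator.uri"],
    }
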
204
204
205
205
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
206 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
207 fullflags = flags + _VCR_FLAGS
207 fullflags = flags + _VCR_FLAGS
208
208
209 def hgmatcher(r1, r2):
209 def hgmatcher(r1, r2):
210 if r1.uri != r2.uri or r1.method != r2.method:
210 if r1.uri != r2.uri or r1.method != r2.method:
211 return False
211 return False
212 r1params = util.urlreq.parseqs(r1.body)
212 r1params = util.urlreq.parseqs(r1.body)
213 r2params = util.urlreq.parseqs(r2.body)
213 r2params = util.urlreq.parseqs(r2.body)
214 for key in r1params:
214 for key in r1params:
215 if key not in r2params:
215 if key not in r2params:
216 return False
216 return False
217 value = r1params[key][0]
217 value = r1params[key][0]
218 # we want to compare json payloads without worrying about ordering
218 # we want to compare json payloads without worrying about ordering
219 if value.startswith(b'{') and value.endswith(b'}'):
219 if value.startswith(b'{') and value.endswith(b'}'):
220 r1json = pycompat.json_loads(value)
220 r1json = pycompat.json_loads(value)
221 r2json = pycompat.json_loads(r2params[key][0])
221 r2json = pycompat.json_loads(r2params[key][0])
222 if r1json != r2json:
222 if r1json != r2json:
223 return False
223 return False
224 elif r2params[key][0] != value:
224 elif r2params[key][0] != value:
225 return False
225 return False
226 return True
226 return True
227
227
228 def sanitiserequest(request):
228 def sanitiserequest(request):
229 request.body = re.sub(
229 request.body = re.sub(
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
230 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
231 )
231 )
232 return request
232 return request
233
233
234 def sanitiseresponse(response):
234 def sanitiseresponse(response):
235 if 'set-cookie' in response['headers']:
235 if 'set-cookie' in response['headers']:
236 del response['headers']['set-cookie']
236 del response['headers']['set-cookie']
237 return response
237 return response
238
238
239 def decorate(fn):
239 def decorate(fn):
240 def inner(*args, **kwargs):
240 def inner(*args, **kwargs):
241 if kwargs.get('test_vcr'):
241 if kwargs.get('test_vcr'):
242 cassette = pycompat.fsdecode(kwargs.pop('test_vcr'))
242 cassette = pycompat.fsdecode(kwargs.pop('test_vcr'))
243 import hgdemandimport
243 import hgdemandimport
244
244
245 with hgdemandimport.deactivated():
245 with hgdemandimport.deactivated():
246 import vcr as vcrmod
246 import vcr as vcrmod
247 import vcr.stubs as stubs
247 import vcr.stubs as stubs
248
248
249 vcr = vcrmod.VCR(
249 vcr = vcrmod.VCR(
250 serializer='json',
250 serializer='json',
251 before_record_request=sanitiserequest,
251 before_record_request=sanitiserequest,
252 before_record_response=sanitiseresponse,
252 before_record_response=sanitiseresponse,
253 custom_patches=[
253 custom_patches=[
254 (
254 (
255 urlmod,
255 urlmod,
256 'httpconnection',
256 'httpconnection',
257 stubs.VCRHTTPConnection,
257 stubs.VCRHTTPConnection,
258 ),
258 ),
259 (
259 (
260 urlmod,
260 urlmod,
261 'httpsconnection',
261 'httpsconnection',
262 stubs.VCRHTTPSConnection,
262 stubs.VCRHTTPSConnection,
263 ),
263 ),
264 ],
264 ],
265 )
265 )
266 vcr.register_matcher('hgmatcher', hgmatcher)
266 vcr.register_matcher('hgmatcher', hgmatcher)
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
267 with vcr.use_cassette(cassette, match_on=['hgmatcher']):
268 return fn(*args, **kwargs)
268 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
269 return fn(*args, **kwargs)
270
270
271 cmd = util.checksignature(inner, depth=2)
271 cmd = util.checksignature(inner, depth=2)
272 cmd.__name__ = fn.__name__
272 cmd.__name__ = fn.__name__
273 cmd.__doc__ = fn.__doc__
273 cmd.__doc__ = fn.__doc__
274
274
275 return command(
275 return command(
276 name,
276 name,
277 fullflags,
277 fullflags,
278 spec,
278 spec,
279 helpcategory=helpcategory,
279 helpcategory=helpcategory,
280 optionalrepo=optionalrepo,
280 optionalrepo=optionalrepo,
281 )(cmd)
281 )(cmd)
282
282
283 return decorate
283 return decorate
284
284
285
285
286 def _debug(ui, *msg, **opts):
286 def _debug(ui, *msg, **opts):
287 """write debug output for Phabricator if ``phabricator.debug`` is set
287 """write debug output for Phabricator if ``phabricator.debug`` is set
288
288
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
289 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
290 printed with the --debug argument.
290 printed with the --debug argument.
291 """
291 """
292 if ui.configbool(b"phabricator", b"debug"):
292 if ui.configbool(b"phabricator", b"debug"):
293 flag = ui.debugflag
293 flag = ui.debugflag
294 try:
294 try:
295 ui.debugflag = True
295 ui.debugflag = True
296 ui.write(*msg, **opts)
296 ui.write(*msg, **opts)
297 finally:
297 finally:
298 ui.debugflag = flag
298 ui.debugflag = flag
299
299
300
300
301 def urlencodenested(params):
301 def urlencodenested(params):
302 """like urlencode, but works with nested parameters.
302 """like urlencode, but works with nested parameters.
303
303
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
304 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
305 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
306 urlencode. Note: the encoding is consistent with PHP's http_build_query.
307 """
307 """
308 flatparams = util.sortdict()
308 flatparams = util.sortdict()
309
309
310 def process(prefix, obj):
310 def process(prefix, obj):
311 if isinstance(obj, bool):
311 if isinstance(obj, bool):
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
312 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
313 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)]
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
314 items = {list: lister, dict: lambda x: x.items()}.get(type(obj))
315 if items is None:
315 if items is None:
316 flatparams[prefix] = obj
316 flatparams[prefix] = obj
317 else:
317 else:
318 for k, v in items(obj):
318 for k, v in items(obj):
319 if prefix:
319 if prefix:
320 process(b'%s[%s]' % (prefix, k), v)
320 process(b'%s[%s]' % (prefix, k), v)
321 else:
321 else:
322 process(k, v)
322 process(k, v)
323
323
324 process(b'', params)
324 process(b'', params)
325 return util.urlreq.urlencode(flatparams)
325 return util.urlreq.urlencode(flatparams)
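
As a standalone illustration of the flattening described in the docstring (the real helper goes through ``util.urlreq`` and ``util.sortdict``), using only the standard library:

    from urllib.parse import urlencode

    def flatten(prefix, obj, out):
        # bools become PHP-style 'true'/'false', as in process() above
        if isinstance(obj, bool):
            obj = "true" if obj else "false"
        if isinstance(obj, list):
            items = [(str(i), v) for i, v in enumerate(obj)]
        elif isinstance(obj, dict):
            items = list(obj.items())
        else:
            out[prefix] = obj
            return
        for k, v in items:
            flatten("%s[%s]" % (prefix, k) if prefix else k, v, out)

    out = {}
    flatten("", {"a": ["b", "c"], "d": {"e": "f"}}, out)
    print(urlencode(out))  # a%5B0%5D=b&a%5B1%5D=c&d%5Be%5D=f
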
326
326
327
327
328 def readurltoken(ui):
328 def readurltoken(ui):
329 """return conduit url, token and make sure they exist
329 """return conduit url, token and make sure they exist
330
330
331 Currently read from [auth] config section. In the future, it might
331 Currently read from [auth] config section. In the future, it might
332 make sense to read from .arcconfig and .arcrc as well.
332 make sense to read from .arcconfig and .arcrc as well.
333 """
333 """
334 url = ui.config(b'phabricator', b'url')
334 url = ui.config(b'phabricator', b'url')
335 if not url:
335 if not url:
336 raise error.Abort(
336 raise error.Abort(
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
337 _(b'config %s.%s is required') % (b'phabricator', b'url')
338 )
338 )
339
339
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
340 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
341 token = None
341 token = None
342
342
343 if res:
343 if res:
344 group, auth = res
344 group, auth = res
345
345
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
346 ui.debug(b"using auth.%s.* for authentication\n" % group)
347
347
348 token = auth.get(b'phabtoken')
348 token = auth.get(b'phabtoken')
349
349
350 if not token:
350 if not token:
351 raise error.Abort(
351 raise error.Abort(
352 _(b'Can\'t find conduit token associated to %s') % (url,)
352 _(b'Can\'t find conduit token associated to %s') % (url,)
353 )
353 )
354
354
355 return url, token
355 return url, token
356
356
357
357
358 def callconduit(ui, name, params):
358 def callconduit(ui, name, params):
359 """call Conduit API, params is a dict. return json.loads result, or None"""
359 """call Conduit API, params is a dict. return json.loads result, or None"""
360 host, token = readurltoken(ui)
360 host, token = readurltoken(ui)
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
361 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
362 ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
363 params = params.copy()
363 params = params.copy()
364 params[b'__conduit__'] = {
364 params[b'__conduit__'] = {
365 b'token': token,
365 b'token': token,
366 }
366 }
367 rawdata = {
367 rawdata = {
368 b'params': templatefilters.json(params),
368 b'params': templatefilters.json(params),
369 b'output': b'json',
369 b'output': b'json',
370 b'__conduit__': 1,
370 b'__conduit__': 1,
371 }
371 }
372 data = urlencodenested(rawdata)
372 data = urlencodenested(rawdata)
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
373 curlcmd = ui.config(b'phabricator', b'curlcmd')
374 if curlcmd:
374 if curlcmd:
375 sin, sout = procutil.popen2(
375 sin, sout = procutil.popen2(
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
376 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
377 )
377 )
378 sin.write(data)
378 sin.write(data)
379 sin.close()
379 sin.close()
380 body = sout.read()
380 body = sout.read()
381 else:
381 else:
382 urlopener = urlmod.opener(ui, authinfo)
382 urlopener = urlmod.opener(ui, authinfo)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
383 request = util.urlreq.request(pycompat.strurl(url), data=data)
384 with contextlib.closing(urlopener.open(request)) as rsp:
384 with contextlib.closing(urlopener.open(request)) as rsp:
385 body = rsp.read()
385 body = rsp.read()
386 ui.debug(b'Conduit Response: %s\n' % body)
386 ui.debug(b'Conduit Response: %s\n' % body)
387 parsed = pycompat.rapply(
387 parsed = pycompat.rapply(
388 lambda x: encoding.unitolocal(x)
388 lambda x: encoding.unitolocal(x)
389 if isinstance(x, pycompat.unicode)
389 if isinstance(x, pycompat.unicode)
390 else x,
390 else x,
391 # json.loads only accepts bytes from py3.6+
391 # json.loads only accepts bytes from py3.6+
392 pycompat.json_loads(encoding.unifromlocal(body)),
392 pycompat.json_loads(encoding.unifromlocal(body)),
393 )
393 )
394 if parsed.get(b'error_code'):
394 if parsed.get(b'error_code'):
395 msg = _(b'Conduit Error (%s): %s') % (
395 msg = _(b'Conduit Error (%s): %s') % (
396 parsed[b'error_code'],
396 parsed[b'error_code'],
397 parsed[b'error_info'],
397 parsed[b'error_info'],
398 )
398 )
399 raise error.Abort(msg)
399 raise error.Abort(msg)
400 return parsed[b'result']
400 return parsed[b'result']
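
A hedged usage sketch, not code from this module: roughly the call that marking D6876 with "plan changes" would funnel through this helper, matching the second interaction in the VCR cassette above (``ui`` is assumed to be a configured ``mercurial.ui.ui`` instance):

    params = {
        b'objectIdentifier': b'PHID-DREV-looitrxgt3omaau7a7qk',
        b'transactions': [{b'type': b'plan-changes', b'value': True}],
    }
    callconduit(ui, b'differential.revision.edit', params)
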
401
401
402
402
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
403 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
404 def debugcallconduit(ui, repo, name):
404 def debugcallconduit(ui, repo, name):
405 """call Conduit API
405 """call Conduit API
406
406
407 Call parameters are read from stdin as a JSON blob. Result will be written
407 Call parameters are read from stdin as a JSON blob. Result will be written
408 to stdout as a JSON blob.
408 to stdout as a JSON blob.
409 """
409 """
410 # json.loads only accepts bytes from 3.6+
410 # json.loads only accepts bytes from 3.6+
411 rawparams = encoding.unifromlocal(ui.fin.read())
411 rawparams = encoding.unifromlocal(ui.fin.read())
412 # json.loads only returns unicode strings
412 # json.loads only returns unicode strings
413 params = pycompat.rapply(
413 params = pycompat.rapply(
414 lambda x: encoding.unitolocal(x)
414 lambda x: encoding.unitolocal(x)
415 if isinstance(x, pycompat.unicode)
415 if isinstance(x, pycompat.unicode)
416 else x,
416 else x,
417 pycompat.json_loads(rawparams),
417 pycompat.json_loads(rawparams),
418 )
418 )
419 # json.dumps only accepts unicode strings
419 # json.dumps only accepts unicode strings
420 result = pycompat.rapply(
420 result = pycompat.rapply(
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
421 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
422 callconduit(ui, name, params),
422 callconduit(ui, name, params),
423 )
423 )
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
424 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
425 ui.write(b'%s\n' % encoding.unitolocal(s))
426
426
427
427
428 def getrepophid(repo):
428 def getrepophid(repo):
429 """given callsign, return repository PHID or None"""
429 """given callsign, return repository PHID or None"""
430 # developer config: phabricator.repophid
430 # developer config: phabricator.repophid
431 repophid = repo.ui.config(b'phabricator', b'repophid')
431 repophid = repo.ui.config(b'phabricator', b'repophid')
432 if repophid:
432 if repophid:
433 return repophid
433 return repophid
434 callsign = repo.ui.config(b'phabricator', b'callsign')
434 callsign = repo.ui.config(b'phabricator', b'callsign')
435 if not callsign:
435 if not callsign:
436 return None
436 return None
437 query = callconduit(
437 query = callconduit(
438 repo.ui,
438 repo.ui,
439 b'diffusion.repository.search',
439 b'diffusion.repository.search',
440 {b'constraints': {b'callsigns': [callsign]}},
440 {b'constraints': {b'callsigns': [callsign]}},
441 )
441 )
442 if len(query[b'data']) == 0:
442 if len(query[b'data']) == 0:
443 return None
443 return None
444 repophid = query[b'data'][0][b'phid']
444 repophid = query[b'data'][0][b'phid']
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
445 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
446 return repophid
446 return repophid
447
447
448
448
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
449 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
450 _differentialrevisiondescre = re.compile(
450 _differentialrevisiondescre = re.compile(
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
451 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
452 )
452 )
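
A quick illustration of what the two patterns above match, using invented example strings (a local tag such as "D1234", and the trailer line appended to commit messages):

    import re

    tagre = re.compile(br'\AD([1-9][0-9]*)\Z')
    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
    )

    assert tagre.match(b'D1234').group(1) == b'1234'
    msg = b'widget: fix a crash\n\nDifferential Revision: https://phab.example.com/D1234'
    assert descre.search(msg).group('id') == b'1234'
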
453
453
454
454
455 def getoldnodedrevmap(repo, nodelist):
455 def getoldnodedrevmap(repo, nodelist):
456 """find previous nodes that has been sent to Phabricator
456 """find previous nodes that has been sent to Phabricator
457
457
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
458 return {node: (oldnode, Differential diff, Differential Revision ID)}
459 for node in nodelist with known previous sent versions, or associated
459 for node in nodelist with known previous sent versions, or associated
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
460 Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
461 be ``None``.
461 be ``None``.
462
462
463 Examines commit messages like "Differential Revision:" to get the
463 Examines commit messages like "Differential Revision:" to get the
464 association information.
464 association information.
465
465
466 If such commit message line is not found, examines all precursors and their
466 If such commit message line is not found, examines all precursors and their
467 tags. Tags with format like "D1234" are considered a match and the node
467 tags. Tags with format like "D1234" are considered a match and the node
468 with that tag, and the number after "D" (ex. 1234) will be returned.
468 with that tag, and the number after "D" (ex. 1234) will be returned.
469
469
470 The ``old node``, if not None, is guaranteed to be the last diff of
470 The ``old node``, if not None, is guaranteed to be the last diff of
471 corresponding Differential Revision, and exist in the repo.
471 corresponding Differential Revision, and exist in the repo.
472 """
472 """
473 unfi = repo.unfiltered()
473 unfi = repo.unfiltered()
474 has_node = unfi.changelog.index.has_node
474 has_node = unfi.changelog.index.has_node
475
475
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
476 result = {} # {node: (oldnode?, lastdiff?, drev)}
477 # ordered for test stability when printing new -> old mapping below
477 # ordered for test stability when printing new -> old mapping below
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
478 toconfirm = util.sortdict() # {node: (force, {precnode}, drev)}
479 for node in nodelist:
479 for node in nodelist:
480 ctx = unfi[node]
480 ctx = unfi[node]
481 # For tags like "D123", put them into "toconfirm" to verify later
481 # For tags like "D123", put them into "toconfirm" to verify later
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
482 precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
483 for n in precnodes:
483 for n in precnodes:
484 if has_node(n):
484 if has_node(n):
485 for tag in unfi.nodetags(n):
485 for tag in unfi.nodetags(n):
486 m = _differentialrevisiontagre.match(tag)
486 m = _differentialrevisiontagre.match(tag)
487 if m:
487 if m:
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
488 toconfirm[node] = (0, set(precnodes), int(m.group(1)))
489 break
489 break
490 else:
490 else:
491 continue # move to next predecessor
491 continue # move to next predecessor
492 break # found a tag, stop
492 break # found a tag, stop
493 else:
493 else:
494 # Check commit message
494 # Check commit message
495 m = _differentialrevisiondescre.search(ctx.description())
495 m = _differentialrevisiondescre.search(ctx.description())
496 if m:
496 if m:
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
497 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
498
498
499 # Double check if tags are genuine by collecting all old nodes from
499 # Double check if tags are genuine by collecting all old nodes from
500 # Phabricator, and expect precursors overlap with it.
500 # Phabricator, and expect precursors overlap with it.
501 if toconfirm:
501 if toconfirm:
502 drevs = [drev for force, precs, drev in toconfirm.values()]
502 drevs = [drev for force, precs, drev in toconfirm.values()]
503 alldiffs = callconduit(
503 alldiffs = callconduit(
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
504 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
505 )
505 )
506
506
507 def getnodes(d, precset):
507 def getnodes(d, precset):
508 # Ignore other nodes that were combined into the Differential
508 # Ignore other nodes that were combined into the Differential
509 # that aren't predecessors of the current local node.
509 # that aren't predecessors of the current local node.
510 return [n for n in getlocalcommits(d) if n in precset]
510 return [n for n in getlocalcommits(d) if n in precset]
511
511
512 for newnode, (force, precset, drev) in toconfirm.items():
512 for newnode, (force, precset, drev) in toconfirm.items():
513 diffs = [
513 diffs = [
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
514 d for d in alldiffs.values() if int(d[b'revisionID']) == drev
515 ]
515 ]
516
516
517 # local predecessors known by Phabricator
517 # local predecessors known by Phabricator
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
518 phprecset = {n for d in diffs for n in getnodes(d, precset)}
519
519
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
520 # Ignore if precursors (Phabricator and local repo) do not overlap,
521 # and force is not set (when commit message says nothing)
521 # and force is not set (when commit message says nothing)
522 if not force and not phprecset:
522 if not force and not phprecset:
523 tagname = b'D%d' % drev
523 tagname = b'D%d' % drev
524 tags.tag(
524 tags.tag(
525 repo,
525 repo,
526 tagname,
526 tagname,
527 nullid,
527 nullid,
528 message=None,
528 message=None,
529 user=None,
529 user=None,
530 date=None,
530 date=None,
531 local=True,
531 local=True,
532 )
532 )
533 unfi.ui.warn(
533 unfi.ui.warn(
534 _(
534 _(
535 b'D%d: local tag removed - does not match '
535 b'D%d: local tag removed - does not match '
536 b'Differential history\n'
536 b'Differential history\n'
537 )
537 )
538 % drev
538 % drev
539 )
539 )
540 continue
540 continue
541
541
542 # Find the last node using Phabricator metadata, and make sure it
542 # Find the last node using Phabricator metadata, and make sure it
543 # exists in the repo
543 # exists in the repo
544 oldnode = lastdiff = None
544 oldnode = lastdiff = None
545 if diffs:
545 if diffs:
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
546 lastdiff = max(diffs, key=lambda d: int(d[b'id']))
547 oldnodes = getnodes(lastdiff, precset)
547 oldnodes = getnodes(lastdiff, precset)
548
548
549 _debug(
549 _debug(
550 unfi.ui,
550 unfi.ui,
551 b"%s mapped to old nodes %s\n"
551 b"%s mapped to old nodes %s\n"
552 % (
552 % (
553 short(newnode),
553 short(newnode),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
554 stringutil.pprint([short(n) for n in sorted(oldnodes)]),
555 ),
555 ),
556 )
556 )
557
557
558 # If this commit was the result of `hg fold` after submission,
558 # If this commit was the result of `hg fold` after submission,
559 # and now resubmitted with --fold, the easiest thing to do is
559 # and now resubmitted with --fold, the easiest thing to do is
560 # to leave the node clear. This only results in creating a new
560 # to leave the node clear. This only results in creating a new
561 # diff for the _same_ Differential Revision if this commit is
561 # diff for the _same_ Differential Revision if this commit is
562 # the first or last in the selected range. If we picked a node
562 # the first or last in the selected range. If we picked a node
563 # from the list instead, it would have to be the lowest if at
563 # from the list instead, it would have to be the lowest if at
564 # the beginning of the --fold range, or the highest at the end.
564 # the beginning of the --fold range, or the highest at the end.
565 # Otherwise, one or more of the nodes wouldn't be considered in
565 # Otherwise, one or more of the nodes wouldn't be considered in
566 # the diff, and the Differential wouldn't be properly updated.
566 # the diff, and the Differential wouldn't be properly updated.
567 # If this commit is the result of `hg split` in the same
567 # If this commit is the result of `hg split` in the same
568 # scenario, there is a single oldnode here (and multiple
568 # scenario, there is a single oldnode here (and multiple
569 # newnodes mapped to it). That makes it the same as the normal
569 # newnodes mapped to it). That makes it the same as the normal
570 # case, as the edges of the newnode range cleanly maps to one
570 # case, as the edges of the newnode range cleanly maps to one
571 # oldnode each.
571 # oldnode each.
572 if len(oldnodes) == 1:
572 if len(oldnodes) == 1:
573 oldnode = oldnodes[0]
573 oldnode = oldnodes[0]
574 if oldnode and not has_node(oldnode):
574 if oldnode and not has_node(oldnode):
575 oldnode = None
575 oldnode = None
576
576
577 result[newnode] = (oldnode, lastdiff, drev)
577 result[newnode] = (oldnode, lastdiff, drev)
578
578
579 return result
579 return result
580
580
581
581
582 def getdrevmap(repo, revs):
582 def getdrevmap(repo, revs):
583 """Return a dict mapping each rev in `revs` to their Differential Revision
583 """Return a dict mapping each rev in `revs` to their Differential Revision
584 ID or None.
584 ID or None.
585 """
585 """
586 result = {}
586 result = {}
587 for rev in revs:
587 for rev in revs:
588 result[rev] = None
588 result[rev] = None
589 ctx = repo[rev]
589 ctx = repo[rev]
590 # Check commit message
590 # Check commit message
591 m = _differentialrevisiondescre.search(ctx.description())
591 m = _differentialrevisiondescre.search(ctx.description())
592 if m:
592 if m:
593 result[rev] = int(m.group('id'))
593 result[rev] = int(m.group('id'))
594 continue
594 continue
595 # Check tags
595 # Check tags
596 for tag in repo.nodetags(ctx.node()):
596 for tag in repo.nodetags(ctx.node()):
597 m = _differentialrevisiontagre.match(tag)
597 m = _differentialrevisiontagre.match(tag)
598 if m:
598 if m:
599 result[rev] = int(m.group(1))
599 result[rev] = int(m.group(1))
600 break
600 break
601
601
602 return result
602 return result
603
603
604
604
605 def getdiff(basectx, ctx, diffopts):
605 def getdiff(basectx, ctx, diffopts):
606 """plain-text diff without header (user, commit message, etc)"""
606 """plain-text diff without header (user, commit message, etc)"""
607 output = util.stringio()
607 output = util.stringio()
608 for chunk, _label in patch.diffui(
608 for chunk, _label in patch.diffui(
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
609 ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
610 ):
610 ):
611 output.write(chunk)
611 output.write(chunk)
612 return output.getvalue()
612 return output.getvalue()
613
613
614
614
615 class DiffChangeType(object):
615 class DiffChangeType(object):
616 ADD = 1
616 ADD = 1
617 CHANGE = 2
617 CHANGE = 2
618 DELETE = 3
618 DELETE = 3
619 MOVE_AWAY = 4
619 MOVE_AWAY = 4
620 COPY_AWAY = 5
620 COPY_AWAY = 5
621 MOVE_HERE = 6
621 MOVE_HERE = 6
622 COPY_HERE = 7
622 COPY_HERE = 7
623 MULTICOPY = 8
623 MULTICOPY = 8
624
624
625
625
626 class DiffFileType(object):
626 class DiffFileType(object):
627 TEXT = 1
627 TEXT = 1
628 IMAGE = 2
628 IMAGE = 2
629 BINARY = 3
629 BINARY = 3
630
630
631
631
632 @attr.s
632 @attr.s
633 class phabhunk(dict):
633 class phabhunk(dict):
634 """Represents a Differential hunk, which is owned by a Differential change
634 """Represents a Differential hunk, which is owned by a Differential change
635 """
635 """
636
636
637 oldOffset = attr.ib(default=0) # camelcase-required
637 oldOffset = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
638 oldLength = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
639 newOffset = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
640 newLength = attr.ib(default=0) # camelcase-required
641 corpus = attr.ib(default='')
641 corpus = attr.ib(default='')
642 # These get added to the phabchange's equivalents
642 # These get added to the phabchange's equivalents
643 addLines = attr.ib(default=0) # camelcase-required
643 addLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
644 delLines = attr.ib(default=0) # camelcase-required
645
645
646
646
647 @attr.s
647 @attr.s
648 class phabchange(object):
648 class phabchange(object):
649 """Represents a Differential change, owns Differential hunks and owned by a
649 """Represents a Differential change, owns Differential hunks and owned by a
650 Differential diff. Each one represents one file in a diff.
650 Differential diff. Each one represents one file in a diff.
651 """
651 """
652
652
653 currentPath = attr.ib(default=None) # camelcase-required
653 currentPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
654 oldPath = attr.ib(default=None) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
655 awayPaths = attr.ib(default=attr.Factory(list)) # camelcase-required
656 metadata = attr.ib(default=attr.Factory(dict))
656 metadata = attr.ib(default=attr.Factory(dict))
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
657 oldProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
658 newProperties = attr.ib(default=attr.Factory(dict)) # camelcase-required
659 type = attr.ib(default=DiffChangeType.CHANGE)
659 type = attr.ib(default=DiffChangeType.CHANGE)
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
660 fileType = attr.ib(default=DiffFileType.TEXT) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
661 commitHash = attr.ib(default=None) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
662 addLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
663 delLines = attr.ib(default=0) # camelcase-required
664 hunks = attr.ib(default=attr.Factory(list))
664 hunks = attr.ib(default=attr.Factory(list))
665
665
666 def copynewmetadatatoold(self):
666 def copynewmetadatatoold(self):
667 for key in list(self.metadata.keys()):
667 for key in list(self.metadata.keys()):
668 newkey = key.replace(b'new:', b'old:')
668 newkey = key.replace(b'new:', b'old:')
669 self.metadata[newkey] = self.metadata[key]
669 self.metadata[newkey] = self.metadata[key]
670
670
671 def addoldmode(self, value):
671 def addoldmode(self, value):
672 self.oldProperties[b'unix:filemode'] = value
672 self.oldProperties[b'unix:filemode'] = value
673
673
674 def addnewmode(self, value):
674 def addnewmode(self, value):
675 self.newProperties[b'unix:filemode'] = value
675 self.newProperties[b'unix:filemode'] = value
676
676
677 def addhunk(self, hunk):
677 def addhunk(self, hunk):
678 if not isinstance(hunk, phabhunk):
678 if not isinstance(hunk, phabhunk):
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
679 raise error.Abort(b'phabchange.addhunk only takes phabhunks')
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
680 self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
681 # It's useful to include these stats since the Phab web UI shows them,
681 # It's useful to include these stats since the Phab web UI shows them,
682 # and uses them to estimate how large a change a Revision is. Also used
682 # and uses them to estimate how large a change a Revision is. Also used
683 # in email subjects for the [+++--] bit.
683 # in email subjects for the [+++--] bit.
684 self.addLines += hunk.addLines
684 self.addLines += hunk.addLines
685 self.delLines += hunk.delLines
685 self.delLines += hunk.delLines
686
686
687
687
688 @attr.s
688 @attr.s
689 class phabdiff(object):
689 class phabdiff(object):
690 """Represents a Differential diff, owns Differential changes. Corresponds
690 """Represents a Differential diff, owns Differential changes. Corresponds
691 to a commit.
691 to a commit.
692 """
692 """
693
693
694 # Doesn't seem to be any reason to send this (output of uname -n)
694 # Doesn't seem to be any reason to send this (output of uname -n)
695 sourceMachine = attr.ib(default=b'') # camelcase-required
695 sourceMachine = attr.ib(default=b'') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
696 sourcePath = attr.ib(default=b'/') # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
697 sourceControlBaseRevision = attr.ib(default=b'0' * 40) # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
698 sourceControlPath = attr.ib(default=b'/') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
699 sourceControlSystem = attr.ib(default=b'hg') # camelcase-required
700 branch = attr.ib(default=b'default')
700 branch = attr.ib(default=b'default')
701 bookmark = attr.ib(default=None)
701 bookmark = attr.ib(default=None)
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
702 creationMethod = attr.ib(default=b'phabsend') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
703 lintStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
704 unitStatus = attr.ib(default=b'none') # camelcase-required
705 changes = attr.ib(default=attr.Factory(dict))
705 changes = attr.ib(default=attr.Factory(dict))
706 repositoryPHID = attr.ib(default=None) # camelcase-required
706 repositoryPHID = attr.ib(default=None) # camelcase-required
707
707
708 def addchange(self, change):
708 def addchange(self, change):
709 if not isinstance(change, phabchange):
709 if not isinstance(change, phabchange):
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
710 raise error.Abort(b'phabdiff.addchange only takes phabchanges')
711 self.changes[change.currentPath] = pycompat.byteskwargs(
711 self.changes[change.currentPath] = pycompat.byteskwargs(
712 attr.asdict(change)
712 attr.asdict(change)
713 )
713 )
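
A hedged sketch of the ownership hierarchy these three classes model (a diff owns changes, a change owns hunks), assuming the extension can be imported as ``hgext.phabricator``; the path and hunk content are invented:

    from hgext.phabricator import (
        DiffChangeType,
        phabchange,
        phabdiff,
        phabhunk,
    )

    pdiff = phabdiff(branch=b'default')
    pchange = phabchange(
        currentPath=b'foo.c', oldPath=b'foo.c', type=DiffChangeType.CHANGE
    )
    pchange.addhunk(
        phabhunk(
            oldOffset=1,
            oldLength=1,
            newOffset=1,
            newLength=2,
            corpus=b' old line\n+new line\n',
            addLines=1,
            delLines=0,
        )
    )
    pdiff.addchange(pchange)
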
714
714
715
715
716 def maketext(pchange, basectx, ctx, fname):
716 def maketext(pchange, basectx, ctx, fname):
717 """populate the phabchange for a text file"""
717 """populate the phabchange for a text file"""
718 repo = ctx.repo()
718 repo = ctx.repo()
719 fmatcher = match.exact([fname])
719 fmatcher = match.exact([fname])
720 diffopts = mdiff.diffopts(git=True, context=32767)
720 diffopts = mdiff.diffopts(git=True, context=32767)
721 _pfctx, _fctx, header, fhunks = next(
721 _pfctx, _fctx, header, fhunks = next(
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
722 patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
723 )
723 )
724
724
725 for fhunk in fhunks:
725 for fhunk in fhunks:
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
726 (oldOffset, oldLength, newOffset, newLength), lines = fhunk
727 corpus = b''.join(lines[1:])
727 corpus = b''.join(lines[1:])
728 shunk = list(header)
728 shunk = list(header)
729 shunk.extend(lines)
729 shunk.extend(lines)
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
730 _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
731 patch.diffstatdata(util.iterlines(shunk))
731 patch.diffstatdata(util.iterlines(shunk))
732 )
732 )
733 pchange.addhunk(
733 pchange.addhunk(
734 phabhunk(
734 phabhunk(
735 oldOffset,
735 oldOffset,
736 oldLength,
736 oldLength,
737 newOffset,
737 newOffset,
738 newLength,
738 newLength,
739 corpus,
739 corpus,
740 addLines,
740 addLines,
741 delLines,
741 delLines,
742 )
742 )
743 )
743 )
744
744
745
745
746 def uploadchunks(fctx, fphid):
746 def uploadchunks(fctx, fphid):
747 """upload large binary files as separate chunks.
747 """upload large binary files as separate chunks.
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
748 Phab requests chunking over 8MiB, and splits into 4MiB chunks
749 """
749 """
750 ui = fctx.repo().ui
750 ui = fctx.repo().ui
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
751 chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
752 with ui.makeprogress(
752 with ui.makeprogress(
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
753 _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
754 ) as progress:
754 ) as progress:
755 for chunk in chunks:
755 for chunk in chunks:
756 progress.increment()
756 progress.increment()
757 if chunk[b'complete']:
757 if chunk[b'complete']:
758 continue
758 continue
759 bstart = int(chunk[b'byteStart'])
759 bstart = int(chunk[b'byteStart'])
760 bend = int(chunk[b'byteEnd'])
760 bend = int(chunk[b'byteEnd'])
761 callconduit(
761 callconduit(
762 ui,
762 ui,
763 b'file.uploadchunk',
763 b'file.uploadchunk',
764 {
764 {
765 b'filePHID': fphid,
765 b'filePHID': fphid,
766 b'byteStart': bstart,
766 b'byteStart': bstart,
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
767 b'data': base64.b64encode(fctx.data()[bstart:bend]),
768 b'dataEncoding': b'base64',
768 b'dataEncoding': b'base64',
769 },
769 },
770 )
770 )
771
771
772
772
773 def uploadfile(fctx):
773 def uploadfile(fctx):
774 """upload binary files to Phabricator"""
774 """upload binary files to Phabricator"""
775 repo = fctx.repo()
775 repo = fctx.repo()
776 ui = repo.ui
776 ui = repo.ui
777 fname = fctx.path()
777 fname = fctx.path()
778 size = fctx.size()
778 size = fctx.size()
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
779 fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())
780
780
781 # an allocate call is required first to see if an upload is even required
781 # an allocate call is required first to see if an upload is even required
782 # (Phab might already have it) and to determine if chunking is needed
782 # (Phab might already have it) and to determine if chunking is needed
783 allocateparams = {
783 allocateparams = {
784 b'name': fname,
784 b'name': fname,
785 b'contentLength': size,
785 b'contentLength': size,
786 b'contentHash': fhash,
786 b'contentHash': fhash,
787 }
787 }
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
788 filealloc = callconduit(ui, b'file.allocate', allocateparams)
789 fphid = filealloc[b'filePHID']
789 fphid = filealloc[b'filePHID']
790
790
791 if filealloc[b'upload']:
791 if filealloc[b'upload']:
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
792 ui.write(_(b'uploading %s\n') % bytes(fctx))
793 if not fphid:
793 if not fphid:
794 uploadparams = {
794 uploadparams = {
795 b'name': fname,
795 b'name': fname,
796 b'data_base64': base64.b64encode(fctx.data()),
796 b'data_base64': base64.b64encode(fctx.data()),
797 }
797 }
798 fphid = callconduit(ui, b'file.upload', uploadparams)
798 fphid = callconduit(ui, b'file.upload', uploadparams)
799 else:
799 else:
800 uploadchunks(fctx, fphid)
800 uploadchunks(fctx, fphid)
801 else:
801 else:
802 ui.debug(b'server already has %s\n' % bytes(fctx))
802 ui.debug(b'server already has %s\n' % bytes(fctx))
803
803
804 if not fphid:
804 if not fphid:
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
805 raise error.Abort(b'Upload of %s failed.' % bytes(fctx))
806
806
807 return fphid
807 return fphid
808
808
809
809
810 def addoldbinary(pchange, oldfctx, fctx):
810 def addoldbinary(pchange, oldfctx, fctx):
811 """add the metadata for the previous version of a binary file to the
811 """add the metadata for the previous version of a binary file to the
812 phabchange for the new version
812 phabchange for the new version
813
813
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
814 ``oldfctx`` is the previous version of the file; ``fctx`` is the new
815 version of the file, or None if the file is being removed.
815 version of the file, or None if the file is being removed.
816 """
816 """
817 if not fctx or fctx.cmp(oldfctx):
817 if not fctx or fctx.cmp(oldfctx):
818 # Files differ, add the old one
818 # Files differ, add the old one
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
819 pchange.metadata[b'old:file:size'] = oldfctx.size()
820 mimeguess, _enc = mimetypes.guess_type(
820 mimeguess, _enc = mimetypes.guess_type(
821 encoding.unifromlocal(oldfctx.path())
821 encoding.unifromlocal(oldfctx.path())
822 )
822 )
823 if mimeguess:
823 if mimeguess:
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
824 pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
825 mimeguess
825 mimeguess
826 )
826 )
827 fphid = uploadfile(oldfctx)
827 fphid = uploadfile(oldfctx)
828 pchange.metadata[b'old:binary-phid'] = fphid
828 pchange.metadata[b'old:binary-phid'] = fphid
829 else:
829 else:
830 # If it's left as IMAGE/BINARY, the web UI might try to display it
831 pchange.fileType = DiffFileType.TEXT
832 pchange.copynewmetadatatoold()
832 pchange.copynewmetadatatoold()
833
833
834
834
835 def makebinary(pchange, fctx):
835 def makebinary(pchange, fctx):
836 """populate the phabchange for a binary file"""
836 """populate the phabchange for a binary file"""
837 pchange.fileType = DiffFileType.BINARY
837 pchange.fileType = DiffFileType.BINARY
838 fphid = uploadfile(fctx)
838 fphid = uploadfile(fctx)
839 pchange.metadata[b'new:binary-phid'] = fphid
839 pchange.metadata[b'new:binary-phid'] = fphid
840 pchange.metadata[b'new:file:size'] = fctx.size()
840 pchange.metadata[b'new:file:size'] = fctx.size()
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
841 mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
842 if mimeguess:
842 if mimeguess:
843 mimeguess = pycompat.bytestr(mimeguess)
843 mimeguess = pycompat.bytestr(mimeguess)
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
844 pchange.metadata[b'new:file:mime-type'] = mimeguess
845 if mimeguess.startswith(b'image/'):
845 if mimeguess.startswith(b'image/'):
846 pchange.fileType = DiffFileType.IMAGE
846 pchange.fileType = DiffFileType.IMAGE
847
847
848
848
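# Illustrative only: the stdlib mimetypes call used by makebinary() above is
# what decides whether a binary file is flagged as an IMAGE. The file names
# here are hypothetical.
#   mimetypes.guess_type('logo.png')[0] == 'image/png'          -> DiffFileType.IMAGE
#   mimetypes.guess_type('dump.tar')[0] == 'application/x-tar'  -> stays DiffFileType.BINARY
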
849 # Copied from mercurial/patch.py
849 # Copied from mercurial/patch.py
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
850 gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
851
851
852
852
853 def notutf8(fctx):
853 def notutf8(fctx):
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
854 """detect non-UTF-8 text files since Phabricator requires them to be marked
855 as binary
855 as binary
856 """
856 """
857 try:
857 try:
858 fctx.data().decode('utf-8')
858 fctx.data().decode('utf-8')
859 return False
859 return False
860 except UnicodeDecodeError:
860 except UnicodeDecodeError:
861 fctx.repo().ui.write(
861 fctx.repo().ui.write(
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
862 _(b'file %s detected as non-UTF-8, marked as binary\n')
863 % fctx.path()
863 % fctx.path()
864 )
864 )
865 return True
865 return True
866
866
867
867
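# A minimal standalone illustration of the check notutf8() performs, on raw
# bytes rather than a filectx (hypothetical data, stdlib only):
def _is_utf8(data):
    try:
        data.decode('utf-8')
        return True
    except UnicodeDecodeError:
        return False

assert _is_utf8(b'gr\xc3\xbcn')      # valid UTF-8
assert not _is_utf8(b'gr\xfcn')      # latin-1 bytes, rejected as non-UTF-8
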
868 def addremoved(pdiff, basectx, ctx, removed):
868 def addremoved(pdiff, basectx, ctx, removed):
869 """add removed files to the phabdiff. Shouldn't include moves"""
869 """add removed files to the phabdiff. Shouldn't include moves"""
870 for fname in removed:
870 for fname in removed:
871 pchange = phabchange(
871 pchange = phabchange(
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
872 currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
873 )
873 )
874 oldfctx = basectx.p1()[fname]
874 oldfctx = basectx.p1()[fname]
875 pchange.addoldmode(gitmode[oldfctx.flags()])
875 pchange.addoldmode(gitmode[oldfctx.flags()])
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
876 if not (oldfctx.isbinary() or notutf8(oldfctx)):
877 maketext(pchange, basectx, ctx, fname)
877 maketext(pchange, basectx, ctx, fname)
878
878
879 pdiff.addchange(pchange)
879 pdiff.addchange(pchange)
880
880
881
881
882 def addmodified(pdiff, basectx, ctx, modified):
882 def addmodified(pdiff, basectx, ctx, modified):
883 """add modified files to the phabdiff"""
883 """add modified files to the phabdiff"""
884 for fname in modified:
884 for fname in modified:
885 fctx = ctx[fname]
885 fctx = ctx[fname]
886 oldfctx = basectx.p1()[fname]
886 oldfctx = basectx.p1()[fname]
887 pchange = phabchange(currentPath=fname, oldPath=fname)
887 pchange = phabchange(currentPath=fname, oldPath=fname)
888 filemode = gitmode[fctx.flags()]
888 filemode = gitmode[fctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
889 originalmode = gitmode[oldfctx.flags()]
890 if filemode != originalmode:
890 if filemode != originalmode:
891 pchange.addoldmode(originalmode)
891 pchange.addoldmode(originalmode)
892 pchange.addnewmode(filemode)
892 pchange.addnewmode(filemode)
893
893
894 if (
894 if (
895 fctx.isbinary()
895 fctx.isbinary()
896 or notutf8(fctx)
896 or notutf8(fctx)
897 or oldfctx.isbinary()
897 or oldfctx.isbinary()
898 or notutf8(oldfctx)
898 or notutf8(oldfctx)
899 ):
899 ):
900 makebinary(pchange, fctx)
900 makebinary(pchange, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
901 addoldbinary(pchange, oldfctx, fctx)
902 else:
902 else:
903 maketext(pchange, basectx, ctx, fname)
903 maketext(pchange, basectx, ctx, fname)
904
904
905 pdiff.addchange(pchange)
905 pdiff.addchange(pchange)
906
906
907
907
908 def addadded(pdiff, basectx, ctx, added, removed):
908 def addadded(pdiff, basectx, ctx, added, removed):
909 """add file adds to the phabdiff, both new files and copies/moves"""
909 """add file adds to the phabdiff, both new files and copies/moves"""
910 # Keep track of files that've been recorded as moved/copied, so if there are
910 # Keep track of files that've been recorded as moved/copied, so if there are
911 # additional copies we can mark them (moves get removed from removed)
911 # additional copies we can mark them (moves get removed from removed)
912 copiedchanges = {}
912 copiedchanges = {}
913 movedchanges = {}
913 movedchanges = {}
914
914
915 copy = {}
915 copy = {}
916 if basectx != ctx:
916 if basectx != ctx:
917 copy = copies.pathcopies(basectx.p1(), ctx)
917 copy = copies.pathcopies(basectx.p1(), ctx)
918
918
919 for fname in added:
919 for fname in added:
920 fctx = ctx[fname]
920 fctx = ctx[fname]
921 oldfctx = None
921 oldfctx = None
922 pchange = phabchange(currentPath=fname)
922 pchange = phabchange(currentPath=fname)
923
923
924 filemode = gitmode[fctx.flags()]
924 filemode = gitmode[fctx.flags()]
925
925
926 if copy:
926 if copy:
927 originalfname = copy.get(fname, fname)
927 originalfname = copy.get(fname, fname)
928 else:
928 else:
929 originalfname = fname
929 originalfname = fname
930 if fctx.renamed():
930 if fctx.renamed():
931 originalfname = fctx.renamed()[0]
931 originalfname = fctx.renamed()[0]
932
932
933 renamed = fname != originalfname
933 renamed = fname != originalfname
934
934
935 if renamed:
935 if renamed:
936 oldfctx = basectx.p1()[originalfname]
936 oldfctx = basectx.p1()[originalfname]
937 originalmode = gitmode[oldfctx.flags()]
937 originalmode = gitmode[oldfctx.flags()]
938 pchange.oldPath = originalfname
938 pchange.oldPath = originalfname
939
939
940 if originalfname in removed:
940 if originalfname in removed:
941 origpchange = phabchange(
941 origpchange = phabchange(
942 currentPath=originalfname,
942 currentPath=originalfname,
943 oldPath=originalfname,
943 oldPath=originalfname,
944 type=DiffChangeType.MOVE_AWAY,
944 type=DiffChangeType.MOVE_AWAY,
945 awayPaths=[fname],
945 awayPaths=[fname],
946 )
946 )
947 movedchanges[originalfname] = origpchange
947 movedchanges[originalfname] = origpchange
948 removed.remove(originalfname)
948 removed.remove(originalfname)
949 pchange.type = DiffChangeType.MOVE_HERE
949 pchange.type = DiffChangeType.MOVE_HERE
950 elif originalfname in movedchanges:
950 elif originalfname in movedchanges:
951 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
951 movedchanges[originalfname].type = DiffChangeType.MULTICOPY
952 movedchanges[originalfname].awayPaths.append(fname)
952 movedchanges[originalfname].awayPaths.append(fname)
953 pchange.type = DiffChangeType.COPY_HERE
953 pchange.type = DiffChangeType.COPY_HERE
954 else: # pure copy
954 else: # pure copy
955 if originalfname not in copiedchanges:
955 if originalfname not in copiedchanges:
956 origpchange = phabchange(
956 origpchange = phabchange(
957 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
957 currentPath=originalfname, type=DiffChangeType.COPY_AWAY
958 )
958 )
959 copiedchanges[originalfname] = origpchange
959 copiedchanges[originalfname] = origpchange
960 else:
960 else:
961 origpchange = copiedchanges[originalfname]
961 origpchange = copiedchanges[originalfname]
962 origpchange.awayPaths.append(fname)
962 origpchange.awayPaths.append(fname)
963 pchange.type = DiffChangeType.COPY_HERE
963 pchange.type = DiffChangeType.COPY_HERE
964
964
965 if filemode != originalmode:
965 if filemode != originalmode:
966 pchange.addoldmode(originalmode)
966 pchange.addoldmode(originalmode)
967 pchange.addnewmode(filemode)
967 pchange.addnewmode(filemode)
968 else: # Brand-new file
968 else: # Brand-new file
969 pchange.addnewmode(gitmode[fctx.flags()])
969 pchange.addnewmode(gitmode[fctx.flags()])
970 pchange.type = DiffChangeType.ADD
970 pchange.type = DiffChangeType.ADD
971
971
972 if (
972 if (
973 fctx.isbinary()
973 fctx.isbinary()
974 or notutf8(fctx)
974 or notutf8(fctx)
975 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
975 or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
976 ):
976 ):
977 makebinary(pchange, fctx)
977 makebinary(pchange, fctx)
978 if renamed:
978 if renamed:
979 addoldbinary(pchange, oldfctx, fctx)
979 addoldbinary(pchange, oldfctx, fctx)
980 else:
980 else:
981 maketext(pchange, basectx, ctx, fname)
981 maketext(pchange, basectx, ctx, fname)
982
982
983 pdiff.addchange(pchange)
983 pdiff.addchange(pchange)
984
984
985 for _path, copiedchange in copiedchanges.items():
985 for _path, copiedchange in copiedchanges.items():
986 pdiff.addchange(copiedchange)
986 pdiff.addchange(copiedchange)
987 for _path, movedchange in movedchanges.items():
987 for _path, movedchange in movedchanges.items():
988 pdiff.addchange(movedchange)
988 pdiff.addchange(movedchange)
989
989
990
990
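# Illustrative summary (comments only) of how the branch above classifies a
# copy or rename of `originalfname` -> `fname`, using the DiffChangeType
# names from this file:
#   originalfname still listed in `removed`   -> old: MOVE_AWAY,  new: MOVE_HERE
#   originalfname already recorded as moved   -> old: MULTICOPY,  new: COPY_HERE
#   otherwise (originalfname still present)   -> old: COPY_AWAY,  new: COPY_HERE
# Brand-new files simply get DiffChangeType.ADD.
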
991 def creatediff(basectx, ctx):
991 def creatediff(basectx, ctx):
992 """create a Differential Diff"""
992 """create a Differential Diff"""
993 repo = ctx.repo()
993 repo = ctx.repo()
994 repophid = getrepophid(repo)
994 repophid = getrepophid(repo)
995 # Create a "Differential Diff" via "differential.creatediff" API
995 # Create a "Differential Diff" via "differential.creatediff" API
996 pdiff = phabdiff(
996 pdiff = phabdiff(
997 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
997 sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
998 branch=b'%s' % ctx.branch(),
998 branch=b'%s' % ctx.branch(),
999 )
999 )
1000 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
1000 modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
1001 # addadded will remove moved files from removed, so addremoved won't get
1001 # addadded will remove moved files from removed, so addremoved won't get
1002 # them
1002 # them
1003 addadded(pdiff, basectx, ctx, added, removed)
1003 addadded(pdiff, basectx, ctx, added, removed)
1004 addmodified(pdiff, basectx, ctx, modified)
1004 addmodified(pdiff, basectx, ctx, modified)
1005 addremoved(pdiff, basectx, ctx, removed)
1005 addremoved(pdiff, basectx, ctx, removed)
1006 if repophid:
1006 if repophid:
1007 pdiff.repositoryPHID = repophid
1007 pdiff.repositoryPHID = repophid
1008 diff = callconduit(
1008 diff = callconduit(
1009 repo.ui,
1009 repo.ui,
1010 b'differential.creatediff',
1010 b'differential.creatediff',
1011 pycompat.byteskwargs(attr.asdict(pdiff)),
1011 pycompat.byteskwargs(attr.asdict(pdiff)),
1012 )
1012 )
1013 if not diff:
1013 if not diff:
1014 if basectx != ctx:
1014 if basectx != ctx:
1015 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
1015 msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
1016 else:
1016 else:
1017 msg = _(b'cannot create diff for %s') % ctx
1017 msg = _(b'cannot create diff for %s') % ctx
1018 raise error.Abort(msg)
1018 raise error.Abort(msg)
1019 return diff
1019 return diff
1020
1020
1021
1021
1022 def writediffproperties(ctxs, diff):
1022 def writediffproperties(ctxs, diff):
1023 """write metadata to diff so patches could be applied losslessly
1023 """write metadata to diff so patches could be applied losslessly
1024
1024
1025 ``ctxs`` is the list of commits that created the diff, in ascending order.
1025 ``ctxs`` is the list of commits that created the diff, in ascending order.
1026 The list is generally a single commit, but may be several when using
1026 The list is generally a single commit, but may be several when using
1027 ``phabsend --fold``.
1027 ``phabsend --fold``.
1028 """
1028 """
1029 # creatediff returns with a diffid but query returns with an id
1029 # creatediff returns with a diffid but query returns with an id
1030 diffid = diff.get(b'diffid', diff.get(b'id'))
1030 diffid = diff.get(b'diffid', diff.get(b'id'))
1031 basectx = ctxs[0]
1031 basectx = ctxs[0]
1032 tipctx = ctxs[-1]
1032 tipctx = ctxs[-1]
1033
1033
1034 params = {
1034 params = {
1035 b'diff_id': diffid,
1035 b'diff_id': diffid,
1036 b'name': b'hg:meta',
1036 b'name': b'hg:meta',
1037 b'data': templatefilters.json(
1037 b'data': templatefilters.json(
1038 {
1038 {
1039 b'user': tipctx.user(),
1039 b'user': tipctx.user(),
1040 b'date': b'%d %d' % tipctx.date(),
1040 b'date': b'%d %d' % tipctx.date(),
1041 b'branch': tipctx.branch(),
1041 b'branch': tipctx.branch(),
1042 b'node': tipctx.hex(),
1042 b'node': tipctx.hex(),
1043 b'parent': basectx.p1().hex(),
1043 b'parent': basectx.p1().hex(),
1044 }
1044 }
1045 ),
1045 ),
1046 }
1046 }
1047 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1047 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1048
1048
1049 commits = {}
1049 commits = {}
1050 for ctx in ctxs:
1050 for ctx in ctxs:
1051 commits[ctx.hex()] = {
1051 commits[ctx.hex()] = {
1052 b'author': stringutil.person(ctx.user()),
1052 b'author': stringutil.person(ctx.user()),
1053 b'authorEmail': stringutil.email(ctx.user()),
1053 b'authorEmail': stringutil.email(ctx.user()),
1054 b'time': int(ctx.date()[0]),
1054 b'time': int(ctx.date()[0]),
1055 b'commit': ctx.hex(),
1055 b'commit': ctx.hex(),
1056 b'parents': [ctx.p1().hex()],
1056 b'parents': [ctx.p1().hex()],
1057 b'branch': ctx.branch(),
1057 b'branch': ctx.branch(),
1058 }
1058 }
1059 params = {
1059 params = {
1060 b'diff_id': diffid,
1060 b'diff_id': diffid,
1061 b'name': b'local:commits',
1061 b'name': b'local:commits',
1062 b'data': templatefilters.json(commits),
1062 b'data': templatefilters.json(commits),
1063 }
1063 }
1064 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1064 callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1065
1065
1066
1066
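# Hypothetical example of the two diff properties written above for a single
# commit; every value here is invented for illustration, only the key names
# come from the code above.
#   b'hg:meta'        -> {"branch": "default", "date": "0 0",
#                         "node": "<40-hex node>", "parent": "<40-hex parent>",
#                         "user": "Alice <alice@example.com>"}
#   b'local:commits'  -> {"<40-hex node>": {"author": "Alice",
#                         "authorEmail": "alice@example.com", "branch": "default",
#                         "commit": "<40-hex node>", "parents": ["<40-hex parent>"],
#                         "time": 0}}
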
1067 def createdifferentialrevision(
1067 def createdifferentialrevision(
1068 ctxs,
1068 ctxs,
1069 revid=None,
1069 revid=None,
1070 parentrevphid=None,
1070 parentrevphid=None,
1071 oldbasenode=None,
1071 oldbasenode=None,
1072 oldnode=None,
1072 oldnode=None,
1073 olddiff=None,
1073 olddiff=None,
1074 actions=None,
1074 actions=None,
1075 comment=None,
1075 comment=None,
1076 ):
1076 ):
1077 """create or update a Differential Revision
1077 """create or update a Differential Revision
1078
1078
1079 If revid is None, create a new Differential Revision, otherwise update
1079 If revid is None, create a new Differential Revision, otherwise update
1080 revid. If parentrevphid is not None, set it as a dependency.
1080 revid. If parentrevphid is not None, set it as a dependency.
1081
1081
1082 If there is a single commit for the new Differential Revision, ``ctxs`` will
1082 If there is a single commit for the new Differential Revision, ``ctxs`` will
1083 be a list of that single context. Otherwise, it is a list that covers the
1083 be a list of that single context. Otherwise, it is a list that covers the
1084 range of changes for the differential, where ``ctxs[0]`` is the first change
1084 range of changes for the differential, where ``ctxs[0]`` is the first change
1085 to include and ``ctxs[-1]`` is the last.
1085 to include and ``ctxs[-1]`` is the last.
1086
1086
1087 If oldnode is not None, check if the patch content (without commit message
1087 If oldnode is not None, check if the patch content (without commit message
1088 and metadata) has changed before creating another diff. For a Revision with
1088 and metadata) has changed before creating another diff. For a Revision with
1089 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1089 a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
1090 Revision covering multiple commits, ``oldbasenode`` corresponds to
1090 Revision covering multiple commits, ``oldbasenode`` corresponds to
1091 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1091 ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
1092 corresponds to ``ctxs[-1]``.
1092 corresponds to ``ctxs[-1]``.
1093
1093
1094 If actions is not None, they will be appended to the transaction.
1094 If actions is not None, they will be appended to the transaction.
1095 """
1095 """
1096 ctx = ctxs[-1]
1096 ctx = ctxs[-1]
1097 basectx = ctxs[0]
1097 basectx = ctxs[0]
1098
1098
1099 repo = ctx.repo()
1099 repo = ctx.repo()
1100 if oldnode:
1100 if oldnode:
1101 diffopts = mdiff.diffopts(git=True, context=32767)
1101 diffopts = mdiff.diffopts(git=True, context=32767)
1102 unfi = repo.unfiltered()
1102 unfi = repo.unfiltered()
1103 oldctx = unfi[oldnode]
1103 oldctx = unfi[oldnode]
1104 oldbasectx = unfi[oldbasenode]
1104 oldbasectx = unfi[oldbasenode]
1105 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1105 neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
1106 oldbasectx, oldctx, diffopts
1106 oldbasectx, oldctx, diffopts
1107 )
1107 )
1108 else:
1108 else:
1109 neednewdiff = True
1109 neednewdiff = True
1110
1110
1111 transactions = []
1111 transactions = []
1112 if neednewdiff:
1112 if neednewdiff:
1113 diff = creatediff(basectx, ctx)
1113 diff = creatediff(basectx, ctx)
1114 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1114 transactions.append({b'type': b'update', b'value': diff[b'phid']})
1115 if comment:
1115 if comment:
1116 transactions.append({b'type': b'comment', b'value': comment})
1116 transactions.append({b'type': b'comment', b'value': comment})
1117 else:
1117 else:
1118 # Even if we don't need to upload a new diff because the patch content
1119 # has not changed, we might still need to update its metadata so
1120 # pushers know the correct node metadata.
1121 assert olddiff
1121 assert olddiff
1122 diff = olddiff
1122 diff = olddiff
1123 writediffproperties(ctxs, diff)
1123 writediffproperties(ctxs, diff)
1124
1124
1125 # Set the parent Revision every time, so commit re-ordering is picked up
1126 if parentrevphid:
1126 if parentrevphid:
1127 transactions.append(
1127 transactions.append(
1128 {b'type': b'parents.set', b'value': [parentrevphid]}
1128 {b'type': b'parents.set', b'value': [parentrevphid]}
1129 )
1129 )
1130
1130
1131 if actions:
1131 if actions:
1132 transactions += actions
1132 transactions += actions
1133
1133
1134 # When folding multiple local commits into a single review, arcanist will
1134 # When folding multiple local commits into a single review, arcanist will
1135 # take the summary line of the first commit as the title, and then
1135 # take the summary line of the first commit as the title, and then
1136 # concatenate the rest of the remaining messages (including each of their
1136 # concatenate the rest of the remaining messages (including each of their
1137 # first lines) to the rest of the first commit message (each separated by
1137 # first lines) to the rest of the first commit message (each separated by
1138 # an empty line), and use that as the summary field. Do the same here.
1138 # an empty line), and use that as the summary field. Do the same here.
1139 # For commits with only a one line message, there is no summary field, as
1139 # For commits with only a one line message, there is no summary field, as
1140 # this gets assigned to the title.
1140 # this gets assigned to the title.
1141 fields = util.sortdict() # sorted for stable wire protocol in tests
1141 fields = util.sortdict() # sorted for stable wire protocol in tests
1142
1142
1143 for i, _ctx in enumerate(ctxs):
1143 for i, _ctx in enumerate(ctxs):
1144 # Parse commit message and update related fields.
1144 # Parse commit message and update related fields.
1145 desc = _ctx.description()
1145 desc = _ctx.description()
1146 info = callconduit(
1146 info = callconduit(
1147 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1147 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
1148 )
1148 )
1149
1149
1150 for k in [b'title', b'summary', b'testPlan']:
1150 for k in [b'title', b'summary', b'testPlan']:
1151 v = info[b'fields'].get(k)
1151 v = info[b'fields'].get(k)
1152 if not v:
1152 if not v:
1153 continue
1153 continue
1154
1154
1155 if i == 0:
1155 if i == 0:
1156 # Title, summary and test plan (if present) are taken verbatim
1156 # Title, summary and test plan (if present) are taken verbatim
1157 # for the first commit.
1157 # for the first commit.
1158 fields[k] = v.rstrip()
1158 fields[k] = v.rstrip()
1159 continue
1159 continue
1160 elif k == b'title':
1160 elif k == b'title':
1161 # Add subsequent titles (i.e. the first line of the commit
1161 # Add subsequent titles (i.e. the first line of the commit
1162 # message) back to the summary.
1162 # message) back to the summary.
1163 k = b'summary'
1163 k = b'summary'
1164
1164
1165 # Append any current field to the existing composite field
1165 # Append any current field to the existing composite field
1166 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1166 fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))
1167
1167
1168 for k, v in fields.items():
1168 for k, v in fields.items():
1169 transactions.append({b'type': k, b'value': v})
1169 transactions.append({b'type': k, b'value': v})
1170
1170
1171 params = {b'transactions': transactions}
1171 params = {b'transactions': transactions}
1172 if revid is not None:
1172 if revid is not None:
1173 # Update an existing Differential Revision
1173 # Update an existing Differential Revision
1174 params[b'objectIdentifier'] = revid
1174 params[b'objectIdentifier'] = revid
1175
1175
1176 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1176 revision = callconduit(repo.ui, b'differential.revision.edit', params)
1177 if not revision:
1177 if not revision:
1178 if len(ctxs) == 1:
1178 if len(ctxs) == 1:
1179 msg = _(b'cannot create revision for %s') % ctx
1179 msg = _(b'cannot create revision for %s') % ctx
1180 else:
1180 else:
1181 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1181 msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
1182 raise error.Abort(msg)
1182 raise error.Abort(msg)
1183
1183
1184 return revision, diff
1184 return revision, diff
1185
1185
1186
1186
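# Hypothetical shape of the `transactions` list sent to
# differential.revision.edit above, for a newly created revision with one
# reviewer and a one-line commit message (PHIDs and text are invented):
#   [{b'type': b'update',        b'value': b'PHID-DIFF-xxxx'},
#    {b'type': b'reviewers.add', b'value': [b'PHID-USER-xxxx']},
#    {b'type': b'title',         b'value': b'fix the frobnicator'}]
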
1187 def userphids(ui, names):
1187 def userphids(ui, names):
1188 """convert user names to PHIDs"""
1188 """convert user names to PHIDs"""
1189 names = [name.lower() for name in names]
1189 names = [name.lower() for name in names]
1190 query = {b'constraints': {b'usernames': names}}
1190 query = {b'constraints': {b'usernames': names}}
1191 result = callconduit(ui, b'user.search', query)
1191 result = callconduit(ui, b'user.search', query)
1192 # A username that is not found is not an API error, so check whether any
1193 # names were missed here.
1194 data = result[b'data']
1194 data = result[b'data']
1195 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1195 resolved = {entry[b'fields'][b'username'].lower() for entry in data}
1196 unresolved = set(names) - resolved
1196 unresolved = set(names) - resolved
1197 if unresolved:
1197 if unresolved:
1198 raise error.Abort(
1198 raise error.Abort(
1199 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1199 _(b'unknown username: %s') % b' '.join(sorted(unresolved))
1200 )
1200 )
1201 return [entry[b'phid'] for entry in data]
1201 return [entry[b'phid'] for entry in data]
1202
1202
1203
1203
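# Minimal sketch of the "missing username" check above, with a hypothetical
# user.search result standing in for the conduit call:
def _example_missing_users():
    names = [b'alice', b'bob']
    data = [{b'phid': b'PHID-USER-aaaa', b'fields': {b'username': b'Alice'}}]
    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
    return set(names) - resolved

# _example_missing_users() == {b'bob'}; a non-empty set triggers the Abort above.
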
1204 def _print_phabsend_action(ui, ctx, newrevid, action):
1204 def _print_phabsend_action(ui, ctx, newrevid, action):
1205 """print the ``action`` that occurred when posting ``ctx`` for review
1205 """print the ``action`` that occurred when posting ``ctx`` for review
1206
1206
1207 This is a utility function for the sending phase of ``phabsend``, which
1208 makes it easier to show a status for all local commits with ``--fold``.
1209 """
1209 """
1210 actiondesc = ui.label(
1210 actiondesc = ui.label(
1211 {
1211 {
1212 b'created': _(b'created'),
1212 b'created': _(b'created'),
1213 b'skipped': _(b'skipped'),
1213 b'skipped': _(b'skipped'),
1214 b'updated': _(b'updated'),
1214 b'updated': _(b'updated'),
1215 }[action],
1215 }[action],
1216 b'phabricator.action.%s' % action,
1216 b'phabricator.action.%s' % action,
1217 )
1217 )
1218 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1218 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
1219 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1219 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
1220 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1220 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
1221 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1221 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
1222
1222
1223
1223
1224 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1224 def _amend_diff_properties(unfi, drevid, newnodes, diff):
1225 """update the local commit list for the ``diff`` associated with ``drevid``
1225 """update the local commit list for the ``diff`` associated with ``drevid``
1226
1226
1227 This is a utility function for the amend phase of ``phabsend``, which
1227 This is a utility function for the amend phase of ``phabsend``, which
1228 converts failures to warning messages.
1228 converts failures to warning messages.
1229 """
1229 """
1230 _debug(
1230 _debug(
1231 unfi.ui,
1231 unfi.ui,
1232 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1232 b"new commits: %s\n" % stringutil.pprint([short(n) for n in newnodes]),
1233 )
1233 )
1234
1234
1235 try:
1235 try:
1236 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1236 writediffproperties([unfi[newnode] for newnode in newnodes], diff)
1237 except util.urlerr.urlerror:
1237 except util.urlerr.urlerror:
1238 # If it fails just warn and keep going, otherwise the DREV
1238 # If it fails just warn and keep going, otherwise the DREV
1239 # associations will be lost
1239 # associations will be lost
1240 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1240 unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1241
1241
1242
1242
1243 @vcrcommand(
1243 @vcrcommand(
1244 b'phabsend',
1244 b'phabsend',
1245 [
1245 [
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1246 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1247 (b'', b'amend', True, _(b'update commit messages')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1248 (b'', b'reviewer', [], _(b'specify reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1249 (b'', b'blocker', [], _(b'specify blocking reviewers')),
1250 (
1250 (
1251 b'm',
1251 b'm',
1252 b'comment',
1252 b'comment',
1253 b'',
1253 b'',
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1254 _(b'add a comment to Revisions with new/updated Diffs'),
1255 ),
1255 ),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1256 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1257 (b'', b'fold', False, _(b'combine the revisions into one review')),
1258 ],
1258 ],
1259 _(b'REV [OPTIONS]'),
1259 _(b'REV [OPTIONS]'),
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1260 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1261 )
1261 )
1262 def phabsend(ui, repo, *revs, **opts):
1262 def phabsend(ui, repo, *revs, **opts):
1263 """upload changesets to Phabricator
1263 """upload changesets to Phabricator
1264
1264
1265 If there are multiple revisions specified, they will be sent as a stack
1266 with a linear dependency relationship using the order specified by the
1267 revset.
1268
1269 When uploading changesets for the first time, local tags will be created
1270 to maintain the association. After the first time, phabsend will check the
1271 obsstore and tag information so it can figure out whether to update an
1272 existing Differential Revision, or create a new one.
1273
1274 If --amend is set, update commit messages so they have the
1275 ``Differential Revision`` URL, and remove the related local tags. This is
1276 similar to what arcanist will do, and is preferred in author-push workflows.
1277 Otherwise, use local tags to record the ``Differential Revision`` association.
1278
1279 The --confirm option lets you confirm changesets before sending them. You
1280 can also add the following to your configuration file to make it the default
1281 behaviour::
1282
1282
1283 [phabsend]
1283 [phabsend]
1284 confirm = true
1284 confirm = true
1285
1285
1286 By default, a separate review will be created for each commit that is
1286 By default, a separate review will be created for each commit that is
1287 selected, and will have the same parent/child relationship in Phabricator.
1287 selected, and will have the same parent/child relationship in Phabricator.
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1288 If ``--fold`` is set, multiple commits are rolled up into a single review
1289 as if diffed from the parent of the first revision to the last. The commit
1289 as if diffed from the parent of the first revision to the last. The commit
1290 messages are concatenated in the summary field on Phabricator.
1290 messages are concatenated in the summary field on Phabricator.
1291
1291
1292 phabsend will check obsstore and the above association to decide whether to
1292 phabsend will check obsstore and the above association to decide whether to
1293 update an existing Differential Revision, or create a new one.
1293 update an existing Differential Revision, or create a new one.
1294 """
1294 """
1295 opts = pycompat.byteskwargs(opts)
1295 opts = pycompat.byteskwargs(opts)
1296 revs = list(revs) + opts.get(b'rev', [])
1296 revs = list(revs) + opts.get(b'rev', [])
1297 revs = scmutil.revrange(repo, revs)
1297 revs = scmutil.revrange(repo, revs)
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1298 revs.sort() # ascending order to preserve topological parent/child in phab
1299
1299
1300 if not revs:
1300 if not revs:
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1301 raise error.Abort(_(b'phabsend requires at least one changeset'))
1302 if opts.get(b'amend'):
1302 if opts.get(b'amend'):
1303 cmdutil.checkunfinished(repo)
1303 cmdutil.checkunfinished(repo)
1304
1304
1305 ctxs = [repo[rev] for rev in revs]
1305 ctxs = [repo[rev] for rev in revs]
1306
1306
1307 if any(c for c in ctxs if c.obsolete()):
1307 if any(c for c in ctxs if c.obsolete()):
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1308 raise error.Abort(_(b"obsolete commits cannot be posted for review"))
1309
1309
1310 # Ensure the local commits are an unbroken range. The semantics of the
1310 # Ensure the local commits are an unbroken range. The semantics of the
1311 # --fold option implies this, and the auto restacking of orphans requires
1311 # --fold option implies this, and the auto restacking of orphans requires
1312 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1312 # it. Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
1313 # get A' as a parent.
1313 # get A' as a parent.
1314 def _fail_nonlinear_revs(revs, skiprev, revtype):
1314 def _fail_nonlinear_revs(revs, skiprev, revtype):
1315 badnodes = [repo[r].node() for r in revs if r != skiprev]
1315 badnodes = [repo[r].node() for r in revs if r != skiprev]
1316 raise error.Abort(
1316 raise error.Abort(
1317 _(b"cannot phabsend multiple %s revisions: %s")
1317 _(b"cannot phabsend multiple %s revisions: %s")
1318 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1318 % (revtype, scmutil.nodesummaries(repo, badnodes)),
1319 hint=_(b"the revisions must form a linear chain"),
1319 hint=_(b"the revisions must form a linear chain"),
1320 )
1320 )
1321
1321
1322 heads = repo.revs(b'heads(%ld)', revs)
1322 heads = repo.revs(b'heads(%ld)', revs)
1323 if len(heads) > 1:
1323 if len(heads) > 1:
1324 _fail_nonlinear_revs(heads, heads.max(), b"head")
1324 _fail_nonlinear_revs(heads, heads.max(), b"head")
1325
1325
1326 roots = repo.revs(b'roots(%ld)', revs)
1326 roots = repo.revs(b'roots(%ld)', revs)
1327 if len(roots) > 1:
1327 if len(roots) > 1:
1328 _fail_nonlinear_revs(roots, roots.min(), b"root")
1328 _fail_nonlinear_revs(roots, roots.min(), b"root")
1329
1329
1330 fold = opts.get(b'fold')
1330 fold = opts.get(b'fold')
1331 if fold:
1331 if fold:
1332 if len(revs) == 1:
1332 if len(revs) == 1:
1333 # TODO: just switch to --no-fold instead?
1333 # TODO: just switch to --no-fold instead?
1334 raise error.Abort(_(b"cannot fold a single revision"))
1334 raise error.Abort(_(b"cannot fold a single revision"))
1335
1335
1336 # There's no clear way to manage multiple commits with a Dxxx tag, so
1336 # There's no clear way to manage multiple commits with a Dxxx tag, so
1337 # require the amend option. (We could append "_nnn", but then it
1337 # require the amend option. (We could append "_nnn", but then it
1338 # becomes jumbled if earlier commits are added to an update.) It should
1338 # becomes jumbled if earlier commits are added to an update.) It should
1339 # lock the repo and ensure that the range is editable, but that would
1339 # lock the repo and ensure that the range is editable, but that would
1340 # make the code pretty convoluted. The default behavior of `arc` is to
1340 # make the code pretty convoluted. The default behavior of `arc` is to
1341 # create a new review anyway.
1341 # create a new review anyway.
1342 if not opts.get(b"amend"):
1342 if not opts.get(b"amend"):
1343 raise error.Abort(_(b"cannot fold with --no-amend"))
1343 raise error.Abort(_(b"cannot fold with --no-amend"))
1344
1344
1345 # It might be possible to bucketize the revisions by the DREV value, and
1345 # It might be possible to bucketize the revisions by the DREV value, and
1346 # iterate over those groups when posting, and then again when amending.
1346 # iterate over those groups when posting, and then again when amending.
1347 # But for simplicity, require all selected revisions to be for the same
1347 # But for simplicity, require all selected revisions to be for the same
1348 # DREV (if present). Adding local revisions to an existing DREV is
1348 # DREV (if present). Adding local revisions to an existing DREV is
1349 # acceptable.
1349 # acceptable.
1350 drevmatchers = [
1350 drevmatchers = [
1351 _differentialrevisiondescre.search(ctx.description())
1351 _differentialrevisiondescre.search(ctx.description())
1352 for ctx in ctxs
1352 for ctx in ctxs
1353 ]
1353 ]
1354 if len({m.group('url') for m in drevmatchers if m}) > 1:
1354 if len({m.group('url') for m in drevmatchers if m}) > 1:
1355 raise error.Abort(
1355 raise error.Abort(
1356 _(b"cannot fold revisions with different DREV values")
1356 _(b"cannot fold revisions with different DREV values")
1357 )
1357 )
1358
1358
1359 # {newnode: (oldnode, olddiff, olddrev)}
1360 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1360 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
1361
1361
1362 confirm = ui.configbool(b'phabsend', b'confirm')
1362 confirm = ui.configbool(b'phabsend', b'confirm')
1363 confirm |= bool(opts.get(b'confirm'))
1363 confirm |= bool(opts.get(b'confirm'))
1364 if confirm:
1364 if confirm:
1365 confirmed = _confirmbeforesend(repo, revs, oldmap)
1365 confirmed = _confirmbeforesend(repo, revs, oldmap)
1366 if not confirmed:
1366 if not confirmed:
1367 raise error.Abort(_(b'phabsend cancelled'))
1367 raise error.Abort(_(b'phabsend cancelled'))
1368
1368
1369 actions = []
1369 actions = []
1370 reviewers = opts.get(b'reviewer', [])
1370 reviewers = opts.get(b'reviewer', [])
1371 blockers = opts.get(b'blocker', [])
1371 blockers = opts.get(b'blocker', [])
1372 phids = []
1372 phids = []
1373 if reviewers:
1373 if reviewers:
1374 phids.extend(userphids(repo.ui, reviewers))
1374 phids.extend(userphids(repo.ui, reviewers))
1375 if blockers:
1375 if blockers:
1376 phids.extend(
1376 phids.extend(
1377 map(
1377 map(
1378 lambda phid: b'blocking(%s)' % phid,
1378 lambda phid: b'blocking(%s)' % phid,
1379 userphids(repo.ui, blockers),
1379 userphids(repo.ui, blockers),
1380 )
1380 )
1381 )
1381 )
1382 if phids:
1382 if phids:
1383 actions.append({b'type': b'reviewers.add', b'value': phids})
1383 actions.append({b'type': b'reviewers.add', b'value': phids})
1384
1384
1385 drevids = [] # [int]
1385 drevids = [] # [int]
1386 diffmap = {} # {newnode: diff}
1386 diffmap = {} # {newnode: diff}
1387
1387
1388 # Send patches one by one so we know their Differential Revision PHIDs and
1388 # Send patches one by one so we know their Differential Revision PHIDs and
1389 # can provide dependency relationship
1389 # can provide dependency relationship
1390 lastrevphid = None
1390 lastrevphid = None
1391 for ctx in ctxs:
1391 for ctx in ctxs:
1392 if fold:
1392 if fold:
1393 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1393 ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
1394 else:
1394 else:
1395 ui.debug(b'sending rev %d\n' % ctx.rev())
1395 ui.debug(b'sending rev %d\n' % ctx.rev())
1396
1396
1397 # Get Differential Revision ID
1397 # Get Differential Revision ID
1398 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1398 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
1399 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1399 oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid
1400
1400
1401 if fold:
1401 if fold:
1402 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1402 oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
1403 ctxs[-1].node(), (None, None, None)
1403 ctxs[-1].node(), (None, None, None)
1404 )
1404 )
1405
1405
1406 if oldnode != ctx.node() or opts.get(b'amend'):
1406 if oldnode != ctx.node() or opts.get(b'amend'):
1407 # Create or update Differential Revision
1407 # Create or update Differential Revision
1408 revision, diff = createdifferentialrevision(
1408 revision, diff = createdifferentialrevision(
1409 ctxs if fold else [ctx],
1409 ctxs if fold else [ctx],
1410 revid,
1410 revid,
1411 lastrevphid,
1411 lastrevphid,
1412 oldbasenode,
1412 oldbasenode,
1413 oldnode,
1413 oldnode,
1414 olddiff,
1414 olddiff,
1415 actions,
1415 actions,
1416 opts.get(b'comment'),
1416 opts.get(b'comment'),
1417 )
1417 )
1418
1418
1419 if fold:
1419 if fold:
1420 for ctx in ctxs:
1420 for ctx in ctxs:
1421 diffmap[ctx.node()] = diff
1421 diffmap[ctx.node()] = diff
1422 else:
1422 else:
1423 diffmap[ctx.node()] = diff
1423 diffmap[ctx.node()] = diff
1424
1424
1425 newrevid = int(revision[b'object'][b'id'])
1425 newrevid = int(revision[b'object'][b'id'])
1426 newrevphid = revision[b'object'][b'phid']
1426 newrevphid = revision[b'object'][b'phid']
1427 if revid:
1427 if revid:
1428 action = b'updated'
1428 action = b'updated'
1429 else:
1429 else:
1430 action = b'created'
1430 action = b'created'
1431
1431
1432 # Create a local tag to note the association, if the commit message
1433 # does not have it already
1434 if not fold:
1434 if not fold:
1435 m = _differentialrevisiondescre.search(ctx.description())
1435 m = _differentialrevisiondescre.search(ctx.description())
1436 if not m or int(m.group('id')) != newrevid:
1436 if not m or int(m.group('id')) != newrevid:
1437 tagname = b'D%d' % newrevid
1437 tagname = b'D%d' % newrevid
1438 tags.tag(
1438 tags.tag(
1439 repo,
1439 repo,
1440 tagname,
1440 tagname,
1441 ctx.node(),
1441 ctx.node(),
1442 message=None,
1442 message=None,
1443 user=None,
1443 user=None,
1444 date=None,
1444 date=None,
1445 local=True,
1445 local=True,
1446 )
1446 )
1447 else:
1447 else:
1448 # Nothing changed. But still set "newrevphid" so the next revision
1448 # Nothing changed. But still set "newrevphid" so the next revision
1449 # could depend on this one and "newrevid" for the summary line.
1449 # could depend on this one and "newrevid" for the summary line.
1450 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1450 newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
1451 newrevid = revid
1451 newrevid = revid
1452 action = b'skipped'
1452 action = b'skipped'
1453
1453
1454 drevids.append(newrevid)
1454 drevids.append(newrevid)
1455 lastrevphid = newrevphid
1455 lastrevphid = newrevphid
1456
1456
1457 if fold:
1457 if fold:
1458 for c in ctxs:
1458 for c in ctxs:
1459 if oldmap.get(c.node(), (None, None, None))[2]:
1459 if oldmap.get(c.node(), (None, None, None))[2]:
1460 action = b'updated'
1460 action = b'updated'
1461 else:
1461 else:
1462 action = b'created'
1462 action = b'created'
1463 _print_phabsend_action(ui, c, newrevid, action)
1463 _print_phabsend_action(ui, c, newrevid, action)
1464 break
1464 break
1465
1465
1466 _print_phabsend_action(ui, ctx, newrevid, action)
1466 _print_phabsend_action(ui, ctx, newrevid, action)
1467
1467
1468 # Update commit messages and remove tags
1468 # Update commit messages and remove tags
1469 if opts.get(b'amend'):
1469 if opts.get(b'amend'):
1470 unfi = repo.unfiltered()
1470 unfi = repo.unfiltered()
1471 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1471 drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
1472 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1472 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
1473 # Eagerly evaluate commits to restabilize before creating new
1473 # Eagerly evaluate commits to restabilize before creating new
1474 # commits. The selected revisions are excluded because they are
1474 # commits. The selected revisions are excluded because they are
1475 # automatically restacked as part of the submission process.
1475 # automatically restacked as part of the submission process.
1476 restack = [
1476 restack = [
1477 c
1477 c
1478 for c in repo.set(
1478 for c in repo.set(
1479 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1479 b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
1480 revs,
1480 revs,
1481 revs,
1481 revs,
1482 )
1482 )
1483 ]
1483 ]
1484 wnode = unfi[b'.'].node()
1484 wnode = unfi[b'.'].node()
1485 mapping = {} # {oldnode: [newnode]}
1485 mapping = {} # {oldnode: [newnode]}
1486 newnodes = []
1486 newnodes = []
1487
1487
1488 drevid = drevids[0]
1488 drevid = drevids[0]
1489
1489
1490 for i, rev in enumerate(revs):
1490 for i, rev in enumerate(revs):
1491 old = unfi[rev]
1491 old = unfi[rev]
1492 if not fold:
1492 if not fold:
1493 drevid = drevids[i]
1493 drevid = drevids[i]
1494 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1494 drev = [d for d in drevs if int(d[b'id']) == drevid][0]
1495
1495
1496 newdesc = get_amended_desc(drev, old, fold)
1496 newdesc = get_amended_desc(drev, old, fold)
1497 # Make sure the commit message contains "Differential Revision"
1498 if (
1498 if (
1499 old.description() != newdesc
1499 old.description() != newdesc
1500 or old.p1().node() in mapping
1500 or old.p1().node() in mapping
1501 or old.p2().node() in mapping
1501 or old.p2().node() in mapping
1502 ):
1502 ):
1503 if old.phase() == phases.public:
1503 if old.phase() == phases.public:
1504 ui.warn(
1504 ui.warn(
1505 _(b"warning: not updating public commit %s\n")
1505 _(b"warning: not updating public commit %s\n")
1506 % scmutil.formatchangeid(old)
1506 % scmutil.formatchangeid(old)
1507 )
1507 )
1508 continue
1508 continue
1509 parents = [
1509 parents = [
1510 mapping.get(old.p1().node(), (old.p1(),))[0],
1510 mapping.get(old.p1().node(), (old.p1(),))[0],
1511 mapping.get(old.p2().node(), (old.p2(),))[0],
1511 mapping.get(old.p2().node(), (old.p2(),))[0],
1512 ]
1512 ]
1513 new = context.metadataonlyctx(
1513 new = context.metadataonlyctx(
1514 repo,
1514 repo,
1515 old,
1515 old,
1516 parents=parents,
1516 parents=parents,
1517 text=newdesc,
1517 text=newdesc,
1518 user=old.user(),
1518 user=old.user(),
1519 date=old.date(),
1519 date=old.date(),
1520 extra=old.extra(),
1520 extra=old.extra(),
1521 )
1521 )
1522
1522
1523 newnode = new.commit()
1523 newnode = new.commit()
1524
1524
1525 mapping[old.node()] = [newnode]
1525 mapping[old.node()] = [newnode]
1526
1526
1527 if fold:
1527 if fold:
1528 # Defer updating the (single) Diff until all nodes are
1528 # Defer updating the (single) Diff until all nodes are
1529 # collected. No tags were created, so none need to be
1529 # collected. No tags were created, so none need to be
1530 # removed.
1530 # removed.
1531 newnodes.append(newnode)
1531 newnodes.append(newnode)
1532 continue
1532 continue
1533
1533
1534 _amend_diff_properties(
1534 _amend_diff_properties(
1535 unfi, drevid, [newnode], diffmap[old.node()]
1535 unfi, drevid, [newnode], diffmap[old.node()]
1536 )
1536 )
1537
1537
1538 # Remove the local tag since it's no longer necessary
1539 tagname = b'D%d' % drevid
1539 tagname = b'D%d' % drevid
1540 if tagname in repo.tags():
1540 if tagname in repo.tags():
1541 tags.tag(
1541 tags.tag(
1542 repo,
1542 repo,
1543 tagname,
1543 tagname,
1544 nullid,
1544 nullid,
1545 message=None,
1545 message=None,
1546 user=None,
1546 user=None,
1547 date=None,
1547 date=None,
1548 local=True,
1548 local=True,
1549 )
1549 )
1550 elif fold:
1550 elif fold:
1551 # When folding multiple commits into one review with
1551 # When folding multiple commits into one review with
1552 # --fold, track even the commits that weren't amended, so
1552 # --fold, track even the commits that weren't amended, so
1553 # that their association isn't lost if the properties are
1553 # that their association isn't lost if the properties are
1554 # rewritten below.
1554 # rewritten below.
1555 newnodes.append(old.node())
1555 newnodes.append(old.node())
1556
1556
1557 # If the submitted commits are public, no amend takes place so
1557 # If the submitted commits are public, no amend takes place so
1558 # there are no newnodes and therefore no diff update to do.
1558 # there are no newnodes and therefore no diff update to do.
1559 if fold and newnodes:
1559 if fold and newnodes:
1560 diff = diffmap[old.node()]
1560 diff = diffmap[old.node()]
1561
1561
1562 # The diff object in diffmap doesn't have the local commits
1563 # because it may have been returned by differential.creatediff,
1564 # not differential.querydiffs. So use the queried diff (if
1565 # present), or force the amend (a new revision is being posted).
1566 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1566 if not olddiff or set(newnodes) != getlocalcommits(olddiff):
1567 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1567 _debug(ui, b"updating local commit list for D%d\n" % drevid)
1568 _amend_diff_properties(unfi, drevid, newnodes, diff)
1568 _amend_diff_properties(unfi, drevid, newnodes, diff)
1569 else:
1569 else:
1570 _debug(
1570 _debug(
1571 ui,
1571 ui,
1572 b"local commit list for D%d is already up-to-date\n"
1572 b"local commit list for D%d is already up-to-date\n"
1573 % drevid,
1573 % drevid,
1574 )
1574 )
1575 elif fold:
1575 elif fold:
1576 _debug(ui, b"no newnodes to update\n")
1576 _debug(ui, b"no newnodes to update\n")
1577
1577
1578 # Restack any children of first-time submissions that were orphaned
1578 # Restack any children of first-time submissions that were orphaned
1579 # in the process. The ctx won't report that it is an orphan until
1579 # in the process. The ctx won't report that it is an orphan until
1580 # the cleanup takes place below.
1580 # the cleanup takes place below.
1581 for old in restack:
1581 for old in restack:
1582 parents = [
1582 parents = [
1583 mapping.get(old.p1().node(), (old.p1(),))[0],
1583 mapping.get(old.p1().node(), (old.p1(),))[0],
1584 mapping.get(old.p2().node(), (old.p2(),))[0],
1584 mapping.get(old.p2().node(), (old.p2(),))[0],
1585 ]
1585 ]
1586 new = context.metadataonlyctx(
1586 new = context.metadataonlyctx(
1587 repo,
1587 repo,
1588 old,
1588 old,
1589 parents=parents,
1589 parents=parents,
1590 text=old.description(),
1590 text=old.description(),
1591 user=old.user(),
1591 user=old.user(),
1592 date=old.date(),
1592 date=old.date(),
1593 extra=old.extra(),
1593 extra=old.extra(),
1594 )
1594 )
1595
1595
1596 newnode = new.commit()
1596 newnode = new.commit()
1597
1597
1598 # Don't obsolete unselected descendants of nodes that have not
1599 # been changed in this transaction; that results in an error.
1600 if newnode != old.node():
1600 if newnode != old.node():
1601 mapping[old.node()] = [newnode]
1601 mapping[old.node()] = [newnode]
1602 _debug(
1602 _debug(
1603 ui,
1603 ui,
1604 b"restabilizing %s as %s\n"
1604 b"restabilizing %s as %s\n"
1605 % (short(old.node()), short(newnode)),
1605 % (short(old.node()), short(newnode)),
1606 )
1606 )
1607 else:
1607 else:
1608 _debug(
1608 _debug(
1609 ui,
1609 ui,
1610 b"not restabilizing unchanged %s\n" % short(old.node()),
1610 b"not restabilizing unchanged %s\n" % short(old.node()),
1611 )
1611 )
1612
1612
1613 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1613 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
1614 if wnode in mapping:
1614 if wnode in mapping:
1615 unfi.setparents(mapping[wnode][0])
1615 unfi.setparents(mapping[wnode][0])
1616
1616
1617
1617
1618 # Map from "hg:meta" keys to header understood by "hg import". The order is
1618 # Map from "hg:meta" keys to header understood by "hg import". The order is
1619 # consistent with "hg export" output.
1619 # consistent with "hg export" output.
1620 _metanamemap = util.sortdict(
1620 _metanamemap = util.sortdict(
1621 [
1621 [
1622 (b'user', b'User'),
1622 (b'user', b'User'),
1623 (b'date', b'Date'),
1623 (b'date', b'Date'),
1624 (b'branch', b'Branch'),
1624 (b'branch', b'Branch'),
1625 (b'node', b'Node ID'),
1625 (b'node', b'Node ID'),
1626 (b'parent', b'Parent '),
1626 (b'parent', b'Parent '),
1627 ]
1627 ]
1628 )
1628 )
1629
1629
1630
1630
1631 def _confirmbeforesend(repo, revs, oldmap):
1631 def _confirmbeforesend(repo, revs, oldmap):
1632 url, token = readurltoken(repo.ui)
1632 url, token = readurltoken(repo.ui)
1633 ui = repo.ui
1633 ui = repo.ui
1634 for rev in revs:
1634 for rev in revs:
1635 ctx = repo[rev]
1635 ctx = repo[rev]
1636 desc = ctx.description().splitlines()[0]
1636 desc = ctx.description().splitlines()[0]
1637 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1637 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
1638 if drevid:
1638 if drevid:
1639 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1639 drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
1640 else:
1640 else:
1641 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1641 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
1642
1642
1643 ui.write(
1643 ui.write(
1644 _(b'%s - %s: %s\n')
1644 _(b'%s - %s: %s\n')
1645 % (
1645 % (
1646 drevdesc,
1646 drevdesc,
1647 ui.label(bytes(ctx), b'phabricator.node'),
1647 ui.label(bytes(ctx), b'phabricator.node'),
1648 ui.label(desc, b'phabricator.desc'),
1648 ui.label(desc, b'phabricator.desc'),
1649 )
1649 )
1650 )
1650 )
1651
1651
1652 if ui.promptchoice(
1652 if ui.promptchoice(
1653 _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
1653 _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
1654 ):
1654 ):
1655 return False
1655 return False
1656
1656
1657 return True
1657 return True
1658
1658
1659
1659
1660 _knownstatusnames = {
1660 _knownstatusnames = {
1661 b'accepted',
1661 b'accepted',
1662 b'needsreview',
1662 b'needsreview',
1663 b'needsrevision',
1663 b'needsrevision',
1664 b'closed',
1664 b'closed',
1665 b'abandoned',
1665 b'abandoned',
1666 b'changesplanned',
1666 b'changesplanned',
1667 }
1667 }
1668
1668
1669
1669
1670 def _getstatusname(drev):
1670 def _getstatusname(drev):
1671 """get normalized status name from a Differential Revision"""
1671 """get normalized status name from a Differential Revision"""
1672 return drev[b'statusName'].replace(b' ', b'').lower()
1672 return drev[b'statusName'].replace(b' ', b'').lower()
1673
1673
1674
1674
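# For example, a Differential Revision whose statusName is "Needs Review"
# normalizes to b'needsreview', one of the _knownstatusnames above:
#   _getstatusname({b'statusName': b'Needs Review'}) == b'needsreview'
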
1675 # Small language to specify differential revisions. Supported symbols: (), :X,
1676 # +, and -.
1677
1677
1678 _elements = {
1678 _elements = {
1679 # token-type: binding-strength, primary, prefix, infix, suffix
1679 # token-type: binding-strength, primary, prefix, infix, suffix
1680 b'(': (12, None, (b'group', 1, b')'), None, None),
1680 b'(': (12, None, (b'group', 1, b')'), None, None),
1681 b':': (8, None, (b'ancestors', 8), None, None),
1681 b':': (8, None, (b'ancestors', 8), None, None),
1682 b'&': (5, None, None, (b'and_', 5), None),
1682 b'&': (5, None, None, (b'and_', 5), None),
1683 b'+': (4, None, None, (b'add', 4), None),
1683 b'+': (4, None, None, (b'add', 4), None),
1684 b'-': (4, None, None, (b'sub', 4), None),
1684 b'-': (4, None, None, (b'sub', 4), None),
1685 b')': (0, None, None, None, None),
1685 b')': (0, None, None, None, None),
1686 b'symbol': (0, b'symbol', None, None, None),
1686 b'symbol': (0, b'symbol', None, None, None),
1687 b'end': (0, None, None, None, None),
1687 b'end': (0, None, None, None, None),
1688 }
1688 }
1689
1689
1690
1690
1691 def _tokenize(text):
1691 def _tokenize(text):
1692 view = memoryview(text) # zero-copy slice
1692 view = memoryview(text) # zero-copy slice
1693 special = b'():+-& '
1693 special = b'():+-& '
1694 pos = 0
1694 pos = 0
1695 length = len(text)
1695 length = len(text)
1696 while pos < length:
1696 while pos < length:
1697 symbol = b''.join(
1697 symbol = b''.join(
1698 itertools.takewhile(
1698 itertools.takewhile(
1699 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1699 lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
1700 )
1700 )
1701 )
1701 )
1702 if symbol:
1702 if symbol:
1703 yield (b'symbol', symbol, pos)
1703 yield (b'symbol', symbol, pos)
1704 pos += len(symbol)
1704 pos += len(symbol)
1705 else: # special char, ignore space
1705 else: # special char, ignore space
1706 if text[pos : pos + 1] != b' ':
1706 if text[pos : pos + 1] != b' ':
1707 yield (text[pos : pos + 1], None, pos)
1707 yield (text[pos : pos + 1], None, pos)
1708 pos += 1
1708 pos += 1
1709 yield (b'end', None, pos)
1709 yield (b'end', None, pos)
1710
1710
1711
1711
1712 def _parse(text):
1712 def _parse(text):
1713 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1713 tree, pos = parser.parser(_elements).parse(_tokenize(text))
1714 if pos != len(text):
1714 if pos != len(text):
1715 raise error.ParseError(b'invalid token', pos)
1715 raise error.ParseError(b'invalid token', pos)
1716 return tree
1716 return tree
1717
1717
1718
1718
1719 def _parsedrev(symbol):
1719 def _parsedrev(symbol):
1720 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1720 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1721 if symbol.startswith(b'D') and symbol[1:].isdigit():
1721 if symbol.startswith(b'D') and symbol[1:].isdigit():
1722 return int(symbol[1:])
1722 return int(symbol[1:])
1723 if symbol.isdigit():
1723 if symbol.isdigit():
1724 return int(symbol)
1724 return int(symbol)
1725
1725
1726
1726
1727 def _prefetchdrevs(tree):
1727 def _prefetchdrevs(tree):
1728 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1728 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
1729 drevs = set()
1729 drevs = set()
1730 ancestordrevs = set()
1730 ancestordrevs = set()
1731 op = tree[0]
1731 op = tree[0]
1732 if op == b'symbol':
1732 if op == b'symbol':
1733 r = _parsedrev(tree[1])
1733 r = _parsedrev(tree[1])
1734 if r:
1734 if r:
1735 drevs.add(r)
1735 drevs.add(r)
1736 elif op == b'ancestors':
1736 elif op == b'ancestors':
1737 r, a = _prefetchdrevs(tree[1])
1737 r, a = _prefetchdrevs(tree[1])
1738 drevs.update(r)
1738 drevs.update(r)
1739 ancestordrevs.update(r)
1739 ancestordrevs.update(r)
1740 ancestordrevs.update(a)
1740 ancestordrevs.update(a)
1741 else:
1741 else:
1742 for t in tree[1:]:
1742 for t in tree[1:]:
1743 r, a = _prefetchdrevs(t)
1743 r, a = _prefetchdrevs(t)
1744 drevs.update(r)
1744 drevs.update(r)
1745 ancestordrevs.update(a)
1745 ancestordrevs.update(a)
1746 return drevs, ancestordrevs
1746 return drevs, ancestordrevs
1747
1747
1748
1748
1749 def querydrev(ui, spec):
1749 def querydrev(ui, spec):
1750 """return a list of "Differential Revision" dicts
1750 """return a list of "Differential Revision" dicts
1751
1751
1752 spec is a string using a simple query language, see docstring in phabread
1752 spec is a string using a simple query language, see docstring in phabread
1753 for details.
1753 for details.
1754
1754
1755 A "Differential Revision dict" looks like:
1755 A "Differential Revision dict" looks like:
1756
1756
1757 {
1757 {
1758 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1758 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
1759 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1759 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
1760 "auxiliary": {
1760 "auxiliary": {
1761 "phabricator:depends-on": [
1761 "phabricator:depends-on": [
1762 "PHID-DREV-gbapp366kutjebt7agcd"
1762 "PHID-DREV-gbapp366kutjebt7agcd"
1763 ],
1763 ],
1764 "phabricator:projects": [],
1764 "phabricator:projects": [],
1765 },
1765 },
1766 "branch": "default",
1766 "branch": "default",
1767 "ccs": [],
1767 "ccs": [],
1768 "commits": [],
1768 "commits": [],
1769 "dateCreated": "1499181406",
1769 "dateCreated": "1499181406",
1770 "dateModified": "1499182103",
1770 "dateModified": "1499182103",
1771 "diffs": [
1771 "diffs": [
1772 "3",
1772 "3",
1773 "4",
1773 "4",
1774 ],
1774 ],
1775 "hashes": [],
1775 "hashes": [],
1776 "id": "2",
1776 "id": "2",
1777 "lineCount": "2",
1777 "lineCount": "2",
1778 "phid": "PHID-DREV-672qvysjcczopag46qty",
1778 "phid": "PHID-DREV-672qvysjcczopag46qty",
1779 "properties": {},
1779 "properties": {},
1780 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1780 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
1781 "reviewers": [],
1781 "reviewers": [],
1782 "sourcePath": null
1782 "sourcePath": null
1783 "status": "0",
1783 "status": "0",
1784 "statusName": "Needs Review",
1784 "statusName": "Needs Review",
1785 "summary": "",
1785 "summary": "",
1786 "testPlan": "",
1786 "testPlan": "",
1787 "title": "example",
1787 "title": "example",
1788 "uri": "https://phab.example.com/D2",
1788 "uri": "https://phab.example.com/D2",
1789 }
1789 }
1790 """
1790 """
1791 # TODO: replace differential.query and differential.querydiffs with
1791 # TODO: replace differential.query and differential.querydiffs with
1792 # differential.diff.search because the former (and their output) are
1792 # differential.diff.search because the former (and their output) are
1793 # frozen, and planned to be deprecated and removed.
1793 # frozen, and planned to be deprecated and removed.
1794
1794
1795 def fetch(params):
1795 def fetch(params):
1796 """params -> single drev or None"""
1796 """params -> single drev or None"""
1797 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1797 key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
1798 if key in prefetched:
1798 if key in prefetched:
1799 return prefetched[key]
1799 return prefetched[key]
1800 drevs = callconduit(ui, b'differential.query', params)
1800 drevs = callconduit(ui, b'differential.query', params)
1801 # Fill prefetched with the result
1801 # Fill prefetched with the result
1802 for drev in drevs:
1802 for drev in drevs:
1803 prefetched[drev[b'phid']] = drev
1803 prefetched[drev[b'phid']] = drev
1804 prefetched[int(drev[b'id'])] = drev
1804 prefetched[int(drev[b'id'])] = drev
1805 if key not in prefetched:
1805 if key not in prefetched:
1806 raise error.Abort(
1806 raise error.Abort(
1807 _(b'cannot get Differential Revision %r') % params
1807 _(b'cannot get Differential Revision %r') % params
1808 )
1808 )
1809 return prefetched[key]
1809 return prefetched[key]
1810
1810
1811 def getstack(topdrevids):
1811 def getstack(topdrevids):
1812 """given a top, get a stack from the bottom, [id] -> [id]"""
1812 """given a top, get a stack from the bottom, [id] -> [id]"""
1813 visited = set()
1813 visited = set()
1814 result = []
1814 result = []
1815 queue = [{b'ids': [i]} for i in topdrevids]
1815 queue = [{b'ids': [i]} for i in topdrevids]
1816 while queue:
1816 while queue:
1817 params = queue.pop()
1817 params = queue.pop()
1818 drev = fetch(params)
1818 drev = fetch(params)
1819 if drev[b'id'] in visited:
1819 if drev[b'id'] in visited:
1820 continue
1820 continue
1821 visited.add(drev[b'id'])
1821 visited.add(drev[b'id'])
1822 result.append(int(drev[b'id']))
1822 result.append(int(drev[b'id']))
1823 auxiliary = drev.get(b'auxiliary', {})
1823 auxiliary = drev.get(b'auxiliary', {})
1824 depends = auxiliary.get(b'phabricator:depends-on', [])
1824 depends = auxiliary.get(b'phabricator:depends-on', [])
1825 for phid in depends:
1825 for phid in depends:
1826 queue.append({b'phids': [phid]})
1826 queue.append({b'phids': [phid]})
1827 result.reverse()
1827 result.reverse()
1828 return smartset.baseset(result)
1828 return smartset.baseset(result)
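# Illustrative sketch (hypothetical revisions): assuming D3 declares a
# phabricator:depends-on link to D2, and D2 to D1, getstack([3]) walks the
# dependencies and returns baseset([1, 2, 3]), the stack ordered bottom to top.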
1829
1829
1830 # Initialize prefetch cache
1830 # Initialize prefetch cache
1831 prefetched = {} # {id or phid: drev}
1831 prefetched = {} # {id or phid: drev}
1832
1832
1833 tree = _parse(spec)
1833 tree = _parse(spec)
1834 drevs, ancestordrevs = _prefetchdrevs(tree)
1834 drevs, ancestordrevs = _prefetchdrevs(tree)
1835
1835
1836 # developer config: phabricator.batchsize
1836 # developer config: phabricator.batchsize
1837 batchsize = ui.configint(b'phabricator', b'batchsize')
1837 batchsize = ui.configint(b'phabricator', b'batchsize')
1838
1838
1839 # Prefetch Differential Revisions in batch
1839 # Prefetch Differential Revisions in batch
1840 tofetch = set(drevs)
1840 tofetch = set(drevs)
1841 for r in ancestordrevs:
1841 for r in ancestordrevs:
1842 tofetch.update(range(max(1, r - batchsize), r + 1))
1842 tofetch.update(range(max(1, r - batchsize), r + 1))
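# Illustrative note (hypothetical numbers): with a batchsize of 12, an
# ancestor query for D20 prefetches range(max(1, 20 - 12), 20 + 1), i.e.
# D8 through D20, in a single conduit call.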
1843 if drevs:
1843 if drevs:
1844 fetch({b'ids': list(tofetch)})
1844 fetch({b'ids': list(tofetch)})
1845 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1845 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
1846
1846
1847 # Walk through the tree, return smartsets
1847 # Walk through the tree, return smartsets
1848 def walk(tree):
1848 def walk(tree):
1849 op = tree[0]
1849 op = tree[0]
1850 if op == b'symbol':
1850 if op == b'symbol':
1851 drev = _parsedrev(tree[1])
1851 drev = _parsedrev(tree[1])
1852 if drev:
1852 if drev:
1853 return smartset.baseset([drev])
1853 return smartset.baseset([drev])
1854 elif tree[1] in _knownstatusnames:
1854 elif tree[1] in _knownstatusnames:
1855 drevs = [
1855 drevs = [
1856 r
1856 r
1857 for r in validids
1857 for r in validids
1858 if _getstatusname(prefetched[r]) == tree[1]
1858 if _getstatusname(prefetched[r]) == tree[1]
1859 ]
1859 ]
1860 return smartset.baseset(drevs)
1860 return smartset.baseset(drevs)
1861 else:
1861 else:
1862 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1862 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
1863 elif op in {b'and_', b'add', b'sub'}:
1863 elif op in {b'and_', b'add', b'sub'}:
1864 assert len(tree) == 3
1864 assert len(tree) == 3
1865 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1865 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
1866 elif op == b'group':
1866 elif op == b'group':
1867 return walk(tree[1])
1867 return walk(tree[1])
1868 elif op == b'ancestors':
1868 elif op == b'ancestors':
1869 return getstack(walk(tree[1]))
1869 return getstack(walk(tree[1]))
1870 else:
1870 else:
1871 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1871 raise error.ProgrammingError(b'illegal tree: %r' % tree)
1872
1872
1873 return [prefetched[r] for r in walk(tree)]
1873 return [prefetched[r] for r in walk(tree)]
1874
1874
1875
1875
1876 def getdescfromdrev(drev):
1876 def getdescfromdrev(drev):
1877 """get description (commit message) from "Differential Revision"
1877 """get description (commit message) from "Differential Revision"
1878
1878
1879 This is similar to differential.getcommitmessage API. But we only care
1879 This is similar to differential.getcommitmessage API. But we only care
1880 about limited fields: title, summary, test plan, and URL.
1880 about limited fields: title, summary, test plan, and URL.
1881 """
1881 """
1882 title = drev[b'title']
1882 title = drev[b'title']
1883 summary = drev[b'summary'].rstrip()
1883 summary = drev[b'summary'].rstrip()
1884 testplan = drev[b'testPlan'].rstrip()
1884 testplan = drev[b'testPlan'].rstrip()
1885 if testplan:
1885 if testplan:
1886 testplan = b'Test Plan:\n%s' % testplan
1886 testplan = b'Test Plan:\n%s' % testplan
1887 uri = b'Differential Revision: %s' % drev[b'uri']
1887 uri = b'Differential Revision: %s' % drev[b'uri']
1888 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
1888 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
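# Illustrative sketch (hypothetical values): for a drev with title
# b'fix crash', summary b'guard against None', an empty test plan, and uri
# b'https://phab.example.com/D2', the assembled message is:
#
#   fix crash
#
#   guard against None
#
#   Differential Revision: https://phab.example.com/D2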
1889
1889
1890
1890
1891 def get_amended_desc(drev, ctx, folded):
1891 def get_amended_desc(drev, ctx, folded):
1892 """similar to ``getdescfromdrev``, but supports a folded series of commits
1892 """similar to ``getdescfromdrev``, but supports a folded series of commits
1893
1893
1894 This is used when determining if an individual commit needs to have its
1894 This is used when determining if an individual commit needs to have its
1895 message amended after posting it for review. The determination is made for
1895 message amended after posting it for review. The determination is made for
1896 each individual commit, even when they were folded into one review.
1896 each individual commit, even when they were folded into one review.
1897 """
1897 """
1898 if not folded:
1898 if not folded:
1899 return getdescfromdrev(drev)
1899 return getdescfromdrev(drev)
1900
1900
1901 uri = b'Differential Revision: %s' % drev[b'uri']
1901 uri = b'Differential Revision: %s' % drev[b'uri']
1902
1902
1903 # Since the commit messages were combined when posting multiple commits
1903 # Since the commit messages were combined when posting multiple commits
1904 # with --fold, the fields can't be read from Phabricator here, or *all*
1904 # with --fold, the fields can't be read from Phabricator here, or *all*
1905 # affected local revisions will end up with the same commit message after
1905 # affected local revisions will end up with the same commit message after
1906 # the URI is amended in. Append the DREV line, or update it if it
1906 # the URI is amended in. Append the DREV line, or update it if it
1907 # exists. At worst, this means commit message or test plan updates on
1907 # exists. At worst, this means commit message or test plan updates on
1908 # Phabricator aren't propagated back to the repository, but that seems
1908 # Phabricator aren't propagated back to the repository, but that seems
1909 # reasonable for the case where local commits are effectively combined
1909 # reasonable for the case where local commits are effectively combined
1910 # in Phabricator.
1910 # in Phabricator.
1911 m = _differentialrevisiondescre.search(ctx.description())
1911 m = _differentialrevisiondescre.search(ctx.description())
1912 if not m:
1912 if not m:
1913 return b'\n\n'.join([ctx.description(), uri])
1913 return b'\n\n'.join([ctx.description(), uri])
1914
1914
1915 return _differentialrevisiondescre.sub(uri, ctx.description())
1915 return _differentialrevisiondescre.sub(uri, ctx.description())
1916
1916
1917
1917
1918 def getlocalcommits(diff):
1918 def getlocalcommits(diff):
1919 """get the set of local commits from a diff object
1919 """get the set of local commits from a diff object
1920
1920
1921 See ``getdiffmeta()`` for an example diff object.
1921 See ``getdiffmeta()`` for an example diff object.
1922 """
1922 """
1923 props = diff.get(b'properties') or {}
1923 props = diff.get(b'properties') or {}
1924 commits = props.get(b'local:commits') or {}
1924 commits = props.get(b'local:commits') or {}
1925 if len(commits) > 1:
1925 if len(commits) > 1:
1926 return {bin(c) for c in commits.keys()}
1926 return {bin(c) for c in commits.keys()}
1927
1927
1928 # Storing the diff metadata predates storing `local:commits`, so continue
1928 # Storing the diff metadata predates storing `local:commits`, so continue
1929 # to use that in the --no-fold case.
1929 # to use that in the --no-fold case.
1930 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1930 return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1931
1931
1932
1932
1933 def getdiffmeta(diff):
1933 def getdiffmeta(diff):
1934 """get commit metadata (date, node, user, p1) from a diff object
1934 """get commit metadata (date, node, user, p1) from a diff object
1935
1935
1936 The metadata could be "hg:meta", sent by phabsend, like:
1936 The metadata could be "hg:meta", sent by phabsend, like:
1937
1937
1938 "properties": {
1938 "properties": {
1939 "hg:meta": {
1939 "hg:meta": {
1940 "branch": "default",
1940 "branch": "default",
1941 "date": "1499571514 25200",
1941 "date": "1499571514 25200",
1942 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1942 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
1943 "user": "Foo Bar <foo@example.com>",
1943 "user": "Foo Bar <foo@example.com>",
1944 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1944 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
1945 }
1945 }
1946 }
1946 }
1947
1947
1948 Or converted from "local:commits", sent by "arc", like:
1948 Or converted from "local:commits", sent by "arc", like:
1949
1949
1950 "properties": {
1950 "properties": {
1951 "local:commits": {
1951 "local:commits": {
1952 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1952 "98c08acae292b2faf60a279b4189beb6cff1414d": {
1953 "author": "Foo Bar",
1953 "author": "Foo Bar",
1954 "authorEmail": "foo@example.com"
1954 "authorEmail": "foo@example.com"
1955 "branch": "default",
1955 "branch": "default",
1956 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1956 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
1957 "local": "1000",
1957 "local": "1000",
1958 "message": "...",
1958 "message": "...",
1959 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1959 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
1960 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1960 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
1961 "summary": "...",
1961 "summary": "...",
1962 "tag": "",
1962 "tag": "",
1963 "time": 1499546314,
1963 "time": 1499546314,
1964 }
1964 }
1965 }
1965 }
1966 }
1966 }
1967
1967
1968 Note: metadata extracted from "local:commits" will lose time zone
1968 Note: metadata extracted from "local:commits" will lose time zone
1969 information.
1969 information.
1970 """
1970 """
1971 props = diff.get(b'properties') or {}
1971 props = diff.get(b'properties') or {}
1972 meta = props.get(b'hg:meta')
1972 meta = props.get(b'hg:meta')
1973 if not meta:
1973 if not meta:
1974 if props.get(b'local:commits'):
1974 if props.get(b'local:commits'):
1975 commit = sorted(props[b'local:commits'].values())[0]
1975 commit = sorted(props[b'local:commits'].values())[0]
1976 meta = {}
1976 meta = {}
1977 if b'author' in commit and b'authorEmail' in commit:
1977 if b'author' in commit and b'authorEmail' in commit:
1978 meta[b'user'] = b'%s <%s>' % (
1978 meta[b'user'] = b'%s <%s>' % (
1979 commit[b'author'],
1979 commit[b'author'],
1980 commit[b'authorEmail'],
1980 commit[b'authorEmail'],
1981 )
1981 )
1982 if b'time' in commit:
1982 if b'time' in commit:
1983 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1983 meta[b'date'] = b'%d 0' % int(commit[b'time'])
1984 if b'branch' in commit:
1984 if b'branch' in commit:
1985 meta[b'branch'] = commit[b'branch']
1985 meta[b'branch'] = commit[b'branch']
1986 node = commit.get(b'commit', commit.get(b'rev'))
1986 node = commit.get(b'commit', commit.get(b'rev'))
1987 if node:
1987 if node:
1988 meta[b'node'] = node
1988 meta[b'node'] = node
1989 if len(commit.get(b'parents', ())) >= 1:
1989 if len(commit.get(b'parents', ())) >= 1:
1990 meta[b'parent'] = commit[b'parents'][0]
1990 meta[b'parent'] = commit[b'parents'][0]
1991 else:
1991 else:
1992 meta = {}
1992 meta = {}
1993 if b'date' not in meta and b'dateCreated' in diff:
1993 if b'date' not in meta and b'dateCreated' in diff:
1994 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1994 meta[b'date'] = b'%s 0' % diff[b'dateCreated']
1995 if b'branch' not in meta and diff.get(b'branch'):
1995 if b'branch' not in meta and diff.get(b'branch'):
1996 meta[b'branch'] = diff[b'branch']
1996 meta[b'branch'] = diff[b'branch']
1997 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1997 if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
1998 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1998 meta[b'parent'] = diff[b'sourceControlBaseRevision']
1999 return meta
1999 return meta
2000
2000
2001
2001
2002 def _getdrevs(ui, stack, specs):
2002 def _getdrevs(ui, stack, specs):
2003 """convert user supplied DREVSPECs into "Differential Revision" dicts
2003 """convert user supplied DREVSPECs into "Differential Revision" dicts
2004
2004
2005 See ``hg help phabread`` for how to specify each DREVSPEC.
2005 See ``hg help phabread`` for how to specify each DREVSPEC.
2006 """
2006 """
2007 if len(specs) > 0:
2007 if len(specs) > 0:
2008
2008
2009 def _formatspec(s):
2009 def _formatspec(s):
2010 if stack:
2010 if stack:
2011 s = b':(%s)' % s
2011 s = b':(%s)' % s
2012 return b'(%s)' % s
2012 return b'(%s)' % s
2013
2013
2014 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
2014 spec = b'+'.join(pycompat.maplist(_formatspec, specs))
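# Illustrative note (hypothetical specs): given [b'D10', b'11'], the joined
# spec is b'(D10)+(11)', or b'(:(D10))+(:(11))' when stack is requested.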
2015
2015
2016 drevs = querydrev(ui, spec)
2016 drevs = querydrev(ui, spec)
2017 if drevs:
2017 if drevs:
2018 return drevs
2018 return drevs
2019
2019
2020 raise error.Abort(_(b"empty DREVSPEC set"))
2020 raise error.Abort(_(b"empty DREVSPEC set"))
2021
2021
2022
2022
2023 def readpatch(ui, drevs, write):
2023 def readpatch(ui, drevs, write):
2024 """generate plain-text patch readable by 'hg import'
2024 """generate plain-text patch readable by 'hg import'
2025
2025
2026 write takes a list of (DREV, bytes), where DREV is the differential number
2026 write takes a list of (DREV, bytes), where DREV is the differential number
2027 (as bytes, without the "D" prefix) and the bytes are the text of a patch
2027 (as bytes, without the "D" prefix) and the bytes are the text of a patch
2028 to be imported. drevs is what "querydrev" returns, results of
2028 to be imported. drevs is what "querydrev" returns, results of
2029 "differential.query".
2029 "differential.query".
2030 """
2030 """
2031 # Prefetch hg:meta property for all diffs
2031 # Prefetch hg:meta property for all diffs
2032 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
2032 diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
2033 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
2033 diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})
2034
2034
2035 patches = []
2035 patches = []
2036
2036
2037 # Generate patch for each drev
2037 # Generate patch for each drev
2038 for drev in drevs:
2038 for drev in drevs:
2039 ui.note(_(b'reading D%s\n') % drev[b'id'])
2039 ui.note(_(b'reading D%s\n') % drev[b'id'])
2040
2040
2041 diffid = max(int(v) for v in drev[b'diffs'])
2041 diffid = max(int(v) for v in drev[b'diffs'])
2042 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
2042 body = callconduit(ui, b'differential.getrawdiff', {b'diffID': diffid})
2043 desc = getdescfromdrev(drev)
2043 desc = getdescfromdrev(drev)
2044 header = b'# HG changeset patch\n'
2044 header = b'# HG changeset patch\n'
2045
2045
2046 # Try to preserve metadata from hg:meta property. Write hg patch
2046 # Try to preserve metadata from hg:meta property. Write hg patch
2047 # headers that can be read by the "import" command. See patchheadermap
2047 # headers that can be read by the "import" command. See patchheadermap
2048 # and extract in mercurial/patch.py for supported headers.
2048 # and extract in mercurial/patch.py for supported headers.
2049 meta = getdiffmeta(diffs[b'%d' % diffid])
2049 meta = getdiffmeta(diffs[b'%d' % diffid])
2050 for k in _metanamemap.keys():
2050 for k in _metanamemap.keys():
2051 if k in meta:
2051 if k in meta:
2052 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
2052 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
2053
2053
2054 content = b'%s%s\n%s' % (header, desc, body)
2054 content = b'%s%s\n%s' % (header, desc, body)
2055 patches.append((drev[b'id'], content))
2055 patches.append((drev[b'id'], content))
2056
2056
2057 # Write patches to the supplied callback
2057 # Write patches to the supplied callback
2058 write(patches)
2058 write(patches)
2059
2059
2060
2060
2061 @vcrcommand(
2061 @vcrcommand(
2062 b'phabread',
2062 b'phabread',
2063 [(b'', b'stack', False, _(b'read dependencies'))],
2063 [(b'', b'stack', False, _(b'read dependencies'))],
2064 _(b'DREVSPEC... [OPTIONS]'),
2064 _(b'DREVSPEC... [OPTIONS]'),
2065 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2065 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2066 optionalrepo=True,
2066 optionalrepo=True,
2067 )
2067 )
2068 def phabread(ui, repo, *specs, **opts):
2068 def phabread(ui, repo, *specs, **opts):
2069 """print patches from Phabricator suitable for importing
2069 """print patches from Phabricator suitable for importing
2070
2070
2071 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2071 DREVSPEC could be a Differential Revision identifier, like ``D123``, or just
2072 the number ``123``. It could also have common operators like ``+``, ``-``,
2072 the number ``123``. It could also have common operators like ``+``, ``-``,
2073 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2073 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
2074 select a stack. If multiple DREVSPEC values are given, the result is the
2074 select a stack. If multiple DREVSPEC values are given, the result is the
2075 union of each individually evaluated value. No attempt is currently made
2075 union of each individually evaluated value. No attempt is currently made
2076 to reorder the values to run from parent to child.
2076 to reorder the values to run from parent to child.
2077
2077
2078 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2078 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
2079 could be used to filter patches by status. For performance reasons, they
2079 could be used to filter patches by status. For performance reasons, they
2080 only filter revisions already selected by non-status terms and cannot be used alone.
2080 only filter revisions already selected by non-status terms and cannot be used alone.
2081
2081
2082 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and excludes
2082 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and excludes
2083 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2083 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
2084 stack up to D9.
2084 stack up to D9.
2085
2085
2086 If --stack is given, follow dependency information and read all patches.
2086 If --stack is given, follow dependency information and read all patches.
2087 It is equivalent to the ``:`` operator.
2087 It is equivalent to the ``:`` operator.
2088 """
2088 """
2089 opts = pycompat.byteskwargs(opts)
2089 opts = pycompat.byteskwargs(opts)
2090 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2090 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2091
2091
2092 def _write(patches):
2092 def _write(patches):
2093 for drev, content in patches:
2093 for drev, content in patches:
2094 ui.write(content)
2094 ui.write(content)
2095
2095
2096 readpatch(ui, drevs, _write)
2096 readpatch(ui, drevs, _write)
2097
2097
2098
2098
2099 @vcrcommand(
2099 @vcrcommand(
2100 b'phabimport',
2100 b'phabimport',
2101 [(b'', b'stack', False, _(b'import dependencies as well'))],
2101 [(b'', b'stack', False, _(b'import dependencies as well'))],
2102 _(b'DREVSPEC... [OPTIONS]'),
2102 _(b'DREVSPEC... [OPTIONS]'),
2103 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2103 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2104 )
2104 )
2105 def phabimport(ui, repo, *specs, **opts):
2105 def phabimport(ui, repo, *specs, **opts):
2106 """import patches from Phabricator for the specified Differential Revisions
2106 """import patches from Phabricator for the specified Differential Revisions
2107
2107
2108 The patches are read and applied starting at the parent of the working
2108 The patches are read and applied starting at the parent of the working
2109 directory.
2109 directory.
2110
2110
2111 See ``hg help phabread`` for how to specify DREVSPEC.
2111 See ``hg help phabread`` for how to specify DREVSPEC.
2112 """
2112 """
2113 opts = pycompat.byteskwargs(opts)
2113 opts = pycompat.byteskwargs(opts)
2114
2114
2115 # --bypass avoids losing exec and symlink bits when importing on Windows,
2115 # --bypass avoids losing exec and symlink bits when importing on Windows,
2116 # and allows importing with a dirty wdir. It also aborts instead of leaving
2116 # and allows importing with a dirty wdir. It also aborts instead of leaving
2117 # rejects.
2117 # rejects.
2118 opts[b'bypass'] = True
2118 opts[b'bypass'] = True
2119
2119
2120 # Mandatory default values, synced with commands.import
2120 # Mandatory default values, synced with commands.import
2121 opts[b'strip'] = 1
2121 opts[b'strip'] = 1
2122 opts[b'prefix'] = b''
2122 opts[b'prefix'] = b''
2123 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2123 # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
2124 opts[b'obsolete'] = False
2124 opts[b'obsolete'] = False
2125
2125
2126 if ui.configbool(b'phabimport', b'secret'):
2126 if ui.configbool(b'phabimport', b'secret'):
2127 opts[b'secret'] = True
2127 opts[b'secret'] = True
2128 if ui.configbool(b'phabimport', b'obsolete'):
2128 if ui.configbool(b'phabimport', b'obsolete'):
2129 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2129 opts[b'obsolete'] = True # Handled by evolve wrapping tryimportone()
2130
2130
2131 def _write(patches):
2131 def _write(patches):
2132 parents = repo[None].parents()
2132 parents = repo[None].parents()
2133
2133
2134 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2134 with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
2135 for drev, contents in patches:
2135 for drev, contents in patches:
2136 ui.status(_(b'applying patch from D%s\n') % drev)
2136 ui.status(_(b'applying patch from D%s\n') % drev)
2137
2137
2138 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2138 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2139 msg, node, rej = cmdutil.tryimportone(
2139 msg, node, rej = cmdutil.tryimportone(
2140 ui,
2140 ui,
2141 repo,
2141 repo,
2142 patchdata,
2142 patchdata,
2143 parents,
2143 parents,
2144 opts,
2144 opts,
2145 [],
2145 [],
2146 None, # Never update wdir to another revision
2146 None, # Never update wdir to another revision
2147 )
2147 )
2148
2148
2149 if not node:
2149 if not node:
2150 raise error.Abort(_(b'D%s: no diffs found') % drev)
2150 raise error.Abort(_(b'D%s: no diffs found') % drev)
2151
2151
2152 ui.note(msg + b'\n')
2152 ui.note(msg + b'\n')
2153 parents = [repo[node]]
2153 parents = [repo[node]]
2154
2154
2155 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2155 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2156
2156
2157 readpatch(repo.ui, drevs, _write)
2157 readpatch(repo.ui, drevs, _write)
2158
2158
2159
2159
2160 @vcrcommand(
2160 @vcrcommand(
2161 b'phabupdate',
2161 b'phabupdate',
2162 [
2162 [
2163 (b'', b'accept', False, _(b'accept revisions')),
2163 (b'', b'accept', False, _(b'accept revisions')),
2164 (b'', b'reject', False, _(b'reject revisions')),
2164 (b'', b'reject', False, _(b'reject revisions')),
2165 (b'', b'abandon', False, _(b'abandon revisions')),
2165 (b'', b'abandon', False, _(b'abandon revisions')),
2166 (b'', b'reclaim', False, _(b'reclaim revisions')),
2166 (b'', b'reclaim', False, _(b'reclaim revisions')),
2167 (b'', b'plan-changes', False, _(b'plan changes for revisions')),
2167 (b'm', b'comment', b'', _(b'comment on the last revision')),
2168 (b'm', b'comment', b'', _(b'comment on the last revision')),
2168 ],
2169 ],
2169 _(b'DREVSPEC... [OPTIONS]'),
2170 _(b'DREVSPEC... [OPTIONS]'),
2170 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2171 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2171 optionalrepo=True,
2172 optionalrepo=True,
2172 )
2173 )
2173 def phabupdate(ui, repo, *specs, **opts):
2174 def phabupdate(ui, repo, *specs, **opts):
2174 """update Differential Revision in batch
2175 """update Differential Revision in batch
2175
2176
2176 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2177 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2177 """
2178 """
2178 opts = pycompat.byteskwargs(opts)
2179 opts = pycompat.byteskwargs(opts)
2179 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
2180 transactions = [
2181 b'abandon',
2182 b'accept',
2183 b'plan-changes',
2184 b'reclaim',
2185 b'reject',
2186 ]
2187 flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
2180 if len(flags) > 1:
2188 if len(flags) > 1:
2181 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2189 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2182
2190
2183 actions = []
2191 actions = []
2184 for f in flags:
2192 for f in flags:
2185 actions.append({b'type': f, b'value': True})
2193 actions.append({b'type': f, b'value': True})
2186
2194
2187 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2195 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2188 for i, drev in enumerate(drevs):
2196 for i, drev in enumerate(drevs):
2189 if i + 1 == len(drevs) and opts.get(b'comment'):
2197 if i + 1 == len(drevs) and opts.get(b'comment'):
2190 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2198 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2191 if actions:
2199 if actions:
2192 params = {
2200 params = {
2193 b'objectIdentifier': drev[b'phid'],
2201 b'objectIdentifier': drev[b'phid'],
2194 b'transactions': actions,
2202 b'transactions': actions,
2195 }
2203 }
2196 callconduit(ui, b'differential.revision.edit', params)
2204 callconduit(ui, b'differential.revision.edit', params)
2197
2205
2198
2206
2199 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2207 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2200 def template_review(context, mapping):
2208 def template_review(context, mapping):
2201 """:phabreview: Object describing the review for this changeset.
2209 """:phabreview: Object describing the review for this changeset.
2202 Has attributes `url` and `id`.
2210 Has attributes `url` and `id`.
2203 """
2211 """
2204 ctx = context.resource(mapping, b'ctx')
2212 ctx = context.resource(mapping, b'ctx')
2205 m = _differentialrevisiondescre.search(ctx.description())
2213 m = _differentialrevisiondescre.search(ctx.description())
2206 if m:
2214 if m:
2207 return templateutil.hybriddict(
2215 return templateutil.hybriddict(
2208 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2216 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2209 )
2217 )
2210 else:
2218 else:
2211 tags = ctx.repo().nodetags(ctx.node())
2219 tags = ctx.repo().nodetags(ctx.node())
2212 for t in tags:
2220 for t in tags:
2213 if _differentialrevisiontagre.match(t):
2221 if _differentialrevisiontagre.match(t):
2214 url = ctx.repo().ui.config(b'phabricator', b'url')
2222 url = ctx.repo().ui.config(b'phabricator', b'url')
2215 if not url.endswith(b'/'):
2223 if not url.endswith(b'/'):
2216 url += b'/'
2224 url += b'/'
2217 url += t
2225 url += t
2218
2226
2219 return templateutil.hybriddict({b'url': url, b'id': t,})
2227 return templateutil.hybriddict({b'url': url, b'id': t,})
2220 return None
2228 return None
2221
2229
2222
2230
2223 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2231 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2224 def template_status(context, mapping):
2232 def template_status(context, mapping):
2225 """:phabstatus: String. Status of Phabricator differential.
2233 """:phabstatus: String. Status of Phabricator differential.
2226 """
2234 """
2227 ctx = context.resource(mapping, b'ctx')
2235 ctx = context.resource(mapping, b'ctx')
2228 repo = context.resource(mapping, b'repo')
2236 repo = context.resource(mapping, b'repo')
2229 ui = context.resource(mapping, b'ui')
2237 ui = context.resource(mapping, b'ui')
2230
2238
2231 rev = ctx.rev()
2239 rev = ctx.rev()
2232 try:
2240 try:
2233 drevid = getdrevmap(repo, [rev])[rev]
2241 drevid = getdrevmap(repo, [rev])[rev]
2234 except KeyError:
2242 except KeyError:
2235 return None
2243 return None
2236 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2244 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2237 for drev in drevs:
2245 for drev in drevs:
2238 if int(drev[b'id']) == drevid:
2246 if int(drev[b'id']) == drevid:
2239 return templateutil.hybriddict(
2247 return templateutil.hybriddict(
2240 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2248 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2241 )
2249 )
2242 return None
2250 return None
2243
2251
2244
2252
2245 @show.showview(b'phabstatus', csettopic=b'work')
2253 @show.showview(b'phabstatus', csettopic=b'work')
2246 def phabstatusshowview(ui, repo, displayer):
2254 def phabstatusshowview(ui, repo, displayer):
2247 """Phabricator differiential status"""
2255 """Phabricator differiential status"""
2248 revs = repo.revs('sort(_underway(), topo)')
2256 revs = repo.revs('sort(_underway(), topo)')
2249 drevmap = getdrevmap(repo, revs)
2257 drevmap = getdrevmap(repo, revs)
2250 unknownrevs, drevids, revsbydrevid = [], set(), {}
2258 unknownrevs, drevids, revsbydrevid = [], set(), {}
2251 for rev, drevid in pycompat.iteritems(drevmap):
2259 for rev, drevid in pycompat.iteritems(drevmap):
2252 if drevid is not None:
2260 if drevid is not None:
2253 drevids.add(drevid)
2261 drevids.add(drevid)
2254 revsbydrevid.setdefault(drevid, set()).add(rev)
2262 revsbydrevid.setdefault(drevid, set()).add(rev)
2255 else:
2263 else:
2256 unknownrevs.append(rev)
2264 unknownrevs.append(rev)
2257
2265
2258 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2266 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2259 drevsbyrev = {}
2267 drevsbyrev = {}
2260 for drev in drevs:
2268 for drev in drevs:
2261 for rev in revsbydrevid[int(drev[b'id'])]:
2269 for rev in revsbydrevid[int(drev[b'id'])]:
2262 drevsbyrev[rev] = drev
2270 drevsbyrev[rev] = drev
2263
2271
2264 def phabstatus(ctx):
2272 def phabstatus(ctx):
2265 drev = drevsbyrev[ctx.rev()]
2273 drev = drevsbyrev[ctx.rev()]
2266 status = ui.label(
2274 status = ui.label(
2267 b'%(statusName)s' % drev,
2275 b'%(statusName)s' % drev,
2268 b'phabricator.status.%s' % _getstatusname(drev),
2276 b'phabricator.status.%s' % _getstatusname(drev),
2269 )
2277 )
2270 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2278 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2271
2279
2272 revs -= smartset.baseset(unknownrevs)
2280 revs -= smartset.baseset(unknownrevs)
2273 revdag = graphmod.dagwalker(repo, revs)
2281 revdag = graphmod.dagwalker(repo, revs)
2274
2282
2275 ui.setconfig(b'experimental', b'graphshorten', True)
2283 ui.setconfig(b'experimental', b'graphshorten', True)
2276 displayer._exthook = phabstatus
2284 displayer._exthook = phabstatus
2277 nodelen = show.longestshortest(repo, revs)
2285 nodelen = show.longestshortest(repo, revs)
2278 logcmdutil.displaygraph(
2286 logcmdutil.displaygraph(
2279 ui,
2287 ui,
2280 repo,
2288 repo,
2281 revdag,
2289 revdag,
2282 displayer,
2290 displayer,
2283 graphmod.asciiedges,
2291 graphmod.asciiedges,
2284 props={b'nodelen': nodelen},
2292 props={b'nodelen': nodelen},
2285 )
2293 )
@@ -1,965 +1,969 b''
1 #require vcr
1 #require vcr
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > phabricator =
4 > phabricator =
5 >
5 >
6 > [auth]
6 > [auth]
7 > hgphab.schemes = https
7 > hgphab.schemes = https
8 > hgphab.prefix = phab.mercurial-scm.org
8 > hgphab.prefix = phab.mercurial-scm.org
9 > # When working on the extension and making phabricator interaction
9 > # When working on the extension and making phabricator interaction
10 > # changes, edit this to be a real phabricator token. When done, edit
10 > # changes, edit this to be a real phabricator token. When done, edit
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
11 > # it back. The VCR transcripts will be auto-sanitised to replace your real
12 > # token with this value.
12 > # token with this value.
13 > hgphab.phabtoken = cli-hahayouwish
13 > hgphab.phabtoken = cli-hahayouwish
14 >
14 >
15 > [phabricator]
15 > [phabricator]
16 > debug = True
16 > debug = True
17 > EOF
17 > EOF
18 $ hg init repo
18 $ hg init repo
19 $ cd repo
19 $ cd repo
20 $ cat >> .hg/hgrc <<EOF
20 $ cat >> .hg/hgrc <<EOF
21 > [phabricator]
21 > [phabricator]
22 > url = https://phab.mercurial-scm.org/
22 > url = https://phab.mercurial-scm.org/
23 > callsign = HG
23 > callsign = HG
24 > EOF
24 > EOF
25 $ VCR="$TESTDIR/phabricator"
25 $ VCR="$TESTDIR/phabricator"
26
26
27 Error is handled reasonably. We override the phabtoken here so that
27 Error is handled reasonably. We override the phabtoken here so that
28 when you're developing changes to phabricator.py you can edit the
28 when you're developing changes to phabricator.py you can edit the
29 above config and have a real token in the test but not have to edit
29 above config and have a real token in the test but not have to edit
30 this test.
30 this test.
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
31 $ hg phabread --config auth.hgphab.phabtoken=cli-notavalidtoken \
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
32 > --test-vcr "$VCR/phabread-conduit-error.json" D4480 | head
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
33 abort: Conduit Error (ERR-INVALID-AUTH): API token "cli-notavalidtoken" has the wrong length. API tokens should be 32 characters long.
34
34
35 Missing arguments don't crash, and may print the command help
35 Missing arguments don't crash, and may print the command help
36
36
37 $ hg debugcallconduit
37 $ hg debugcallconduit
38 hg debugcallconduit: invalid arguments
38 hg debugcallconduit: invalid arguments
39 hg debugcallconduit METHOD
39 hg debugcallconduit METHOD
40
40
41 call Conduit API
41 call Conduit API
42
42
43 options:
43 options:
44
44
45 (use 'hg debugcallconduit -h' to show more help)
45 (use 'hg debugcallconduit -h' to show more help)
46 [255]
46 [255]
47 $ hg phabread
47 $ hg phabread
48 abort: empty DREVSPEC set
48 abort: empty DREVSPEC set
49 [255]
49 [255]
50
50
51 Basic phabread:
51 Basic phabread:
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
52 $ hg phabread --test-vcr "$VCR/phabread-4480.json" D4480 | head
53 # HG changeset patch
53 # HG changeset patch
54 # Date 1536771503 0
54 # Date 1536771503 0
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
55 # Parent a5de21c9e3703f8e8eb064bd7d893ff2f703c66a
56 exchangev2: start to implement pull with wire protocol v2
56 exchangev2: start to implement pull with wire protocol v2
57
57
58 Wire protocol version 2 will take a substantially different
58 Wire protocol version 2 will take a substantially different
59 approach to exchange than version 1 (at least as far as pulling
59 approach to exchange than version 1 (at least as far as pulling
60 is concerned).
60 is concerned).
61
61
62 This commit establishes a new exchangev2 module for holding
62 This commit establishes a new exchangev2 module for holding
63
63
64 Phabread with multiple DREVSPEC
64 Phabread with multiple DREVSPEC
65
65
66 TODO: attempt to order related revisions like --stack?
66 TODO: attempt to order related revisions like --stack?
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
67 $ hg phabread --test-vcr "$VCR/phabread-multi-drev.json" D8205 8206 D8207 \
68 > | grep '^Differential Revision'
68 > | grep '^Differential Revision'
69 Differential Revision: https://phab.mercurial-scm.org/D8205
69 Differential Revision: https://phab.mercurial-scm.org/D8205
70 Differential Revision: https://phab.mercurial-scm.org/D8206
70 Differential Revision: https://phab.mercurial-scm.org/D8206
71 Differential Revision: https://phab.mercurial-scm.org/D8207
71 Differential Revision: https://phab.mercurial-scm.org/D8207
72
72
73 Empty DREVSPECs don't crash
73 Empty DREVSPECs don't crash
74
74
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
75 $ hg phabread --test-vcr "$VCR/phabread-empty-drev.json" D7917-D7917
76 abort: empty DREVSPEC set
76 abort: empty DREVSPEC set
77 [255]
77 [255]
78
78
79
79
80 phabupdate with an accept:
80 phabupdate with an accept:
81 $ hg phabupdate --accept D4564 \
81 $ hg phabupdate --accept D4564 \
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
82 > -m 'I think I like where this is headed. Will read rest of series later.'\
83 > --test-vcr "$VCR/accept-4564.json"
83 > --test-vcr "$VCR/accept-4564.json"
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
84 abort: Conduit Error (ERR-CONDUIT-CORE): Validation errors:
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
85 - You can not accept this revision because it has already been closed. Only open revisions can be accepted.
86 [255]
86 [255]
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
87 $ hg phabupdate --accept D7913 -m 'LGTM' --test-vcr "$VCR/accept-7913.json"
88
88
89 phabupdate with --plan-changes:
90
91 $ hg phabupdate --plan-changes D6876 --test-vcr "$VCR/phabupdate-change-6876.json"
92
89 Create a differential diff:
93 Create a differential diff:
90 $ HGENCODING=utf-8; export HGENCODING
94 $ HGENCODING=utf-8; export HGENCODING
91 $ echo alpha > alpha
95 $ echo alpha > alpha
92 $ hg ci --addremove -m 'create alpha for phabricator test €'
96 $ hg ci --addremove -m 'create alpha for phabricator test €'
93 adding alpha
97 adding alpha
94 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
98 $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
95 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
99 D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
96 new commits: ['347bf67801e5']
100 new commits: ['347bf67801e5']
97 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
101 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
98 $ echo more >> alpha
102 $ echo more >> alpha
99 $ HGEDITOR=true hg ci --amend
103 $ HGEDITOR=true hg ci --amend
100 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
104 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/347bf67801e5-3bf313e4-amend.hg
101 $ echo beta > beta
105 $ echo beta > beta
102 $ hg ci --addremove -m 'create beta for phabricator test'
106 $ hg ci --addremove -m 'create beta for phabricator test'
103 adding beta
107 adding beta
104 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
108 $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
105 c44b38f24a45 mapped to old nodes []
109 c44b38f24a45 mapped to old nodes []
106 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
110 D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
107 D7916 - created - 9e6901f21d5b: create beta for phabricator test
111 D7916 - created - 9e6901f21d5b: create beta for phabricator test
108 new commits: ['a692622e6937']
112 new commits: ['a692622e6937']
109 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
113 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
110 $ unset HGENCODING
114 $ unset HGENCODING
111
115
112 The amend won't explode after posting a public commit. The local tag is left
116 The amend won't explode after posting a public commit. The local tag is left
113 behind to identify it.
117 behind to identify it.
114
118
115 $ echo 'public change' > beta
119 $ echo 'public change' > beta
116 $ hg ci -m 'create public change for phabricator testing'
120 $ hg ci -m 'create public change for phabricator testing'
117 $ hg phase --public .
121 $ hg phase --public .
118 $ echo 'draft change' > alpha
122 $ echo 'draft change' > alpha
119 $ hg ci -m 'create draft change for phabricator testing'
123 $ hg ci -m 'create draft change for phabricator testing'
120 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
124 $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
121 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
125 D7917 - created - 7b4185ab5d16: create public change for phabricator testing
122 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
126 D7918 - created - 251c1c333fc6: create draft change for phabricator testing
123 warning: not updating public commit 2:7b4185ab5d16
127 warning: not updating public commit 2:7b4185ab5d16
124 new commits: ['3244dc4a3334']
128 new commits: ['3244dc4a3334']
125 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
129 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
126 $ hg tags -v
130 $ hg tags -v
127 tip 3:3244dc4a3334
131 tip 3:3244dc4a3334
128 D7917 2:7b4185ab5d16 local
132 D7917 2:7b4185ab5d16 local
129
133
130 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
134 $ hg debugcallconduit user.search --test-vcr "$VCR/phab-conduit.json" <<EOF
131 > {
135 > {
132 > "constraints": {
136 > "constraints": {
133 > "isBot": true
137 > "isBot": true
134 > }
138 > }
135 > }
139 > }
136 > EOF
140 > EOF
137 {
141 {
138 "cursor": {
142 "cursor": {
139 "after": null,
143 "after": null,
140 "before": null,
144 "before": null,
141 "limit": 100,
145 "limit": 100,
142 "order": null
146 "order": null
143 },
147 },
144 "data": [],
148 "data": [],
145 "maps": {},
149 "maps": {},
146 "query": {
150 "query": {
147 "queryKey": null
151 "queryKey": null
148 }
152 }
149 }
153 }
150
154
151 Template keywords
155 Template keywords
152 $ hg log -T'{rev} {phabreview|json}\n'
156 $ hg log -T'{rev} {phabreview|json}\n'
153 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
157 3 {"id": "D7918", "url": "https://phab.mercurial-scm.org/D7918"}
154 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
158 2 {"id": "D7917", "url": "https://phab.mercurial-scm.org/D7917"}
155 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
159 1 {"id": "D7916", "url": "https://phab.mercurial-scm.org/D7916"}
156 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
160 0 {"id": "D7915", "url": "https://phab.mercurial-scm.org/D7915"}
157
161
158 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
162 $ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n'
159 3 https://phab.mercurial-scm.org/D7918 D7918
163 3 https://phab.mercurial-scm.org/D7918 D7918
160 2 https://phab.mercurial-scm.org/D7917 D7917
164 2 https://phab.mercurial-scm.org/D7917 D7917
161 1 https://phab.mercurial-scm.org/D7916 D7916
165 1 https://phab.mercurial-scm.org/D7916 D7916
162 0 https://phab.mercurial-scm.org/D7915 D7915
166 0 https://phab.mercurial-scm.org/D7915 D7915
163
167
164 Commenting when phabsending:
168 Commenting when phabsending:
165 $ echo comment > comment
169 $ echo comment > comment
166 $ hg ci --addremove -m "create comment for phabricator test"
170 $ hg ci --addremove -m "create comment for phabricator test"
167 adding comment
171 adding comment
168 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
172 $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
169 D7919 - created - d5dddca9023d: create comment for phabricator test
173 D7919 - created - d5dddca9023d: create comment for phabricator test
170 new commits: ['f7db812bbe1d']
174 new commits: ['f7db812bbe1d']
171 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
175 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
172 $ echo comment2 >> comment
176 $ echo comment2 >> comment
173 $ hg ci --amend
177 $ hg ci --amend
174 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
178 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
175 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
179 $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
176 1849d7828727 mapped to old nodes []
180 1849d7828727 mapped to old nodes []
177 D7919 - updated - 1849d7828727: create comment for phabricator test
181 D7919 - updated - 1849d7828727: create comment for phabricator test
178
182
179 Phabsending a skipped commit:
183 Phabsending a skipped commit:
180 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
184 $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
181 1849d7828727 mapped to old nodes ['1849d7828727']
185 1849d7828727 mapped to old nodes ['1849d7828727']
182 D7919 - skipped - 1849d7828727: create comment for phabricator test
186 D7919 - skipped - 1849d7828727: create comment for phabricator test
183
187
184 Phabsend doesn't create an instability when restacking existing revisions on top
188 Phabsend doesn't create an instability when restacking existing revisions on top
185 of new revisions.
189 of new revisions.
186
190
187 $ hg init reorder
191 $ hg init reorder
188 $ cd reorder
192 $ cd reorder
189 $ cat >> .hg/hgrc <<EOF
193 $ cat >> .hg/hgrc <<EOF
190 > [phabricator]
194 > [phabricator]
191 > url = https://phab.mercurial-scm.org/
195 > url = https://phab.mercurial-scm.org/
192 > callsign = HG
196 > callsign = HG
193 > [experimental]
197 > [experimental]
194 > evolution = all
198 > evolution = all
195 > EOF
199 > EOF
196
200
197 $ echo "add" > file1.txt
201 $ echo "add" > file1.txt
198 $ hg ci -Aqm 'added'
202 $ hg ci -Aqm 'added'
199 $ echo "mod1" > file1.txt
203 $ echo "mod1" > file1.txt
200 $ hg ci -m 'modified 1'
204 $ hg ci -m 'modified 1'
201 $ echo "mod2" > file1.txt
205 $ echo "mod2" > file1.txt
202 $ hg ci -m 'modified 2'
206 $ hg ci -m 'modified 2'
203 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
207 $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
204 D8433 - created - 5d3959e20d1d: modified 2
208 D8433 - created - 5d3959e20d1d: modified 2
205 new commits: ['2b4aa8a88d61']
209 new commits: ['2b4aa8a88d61']
206 $ hg log -G -T compact
210 $ hg log -G -T compact
207 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
211 @ 3[tip]:1 2b4aa8a88d61 1970-01-01 00:00 +0000 test
208 | modified 2
212 | modified 2
209 |
213 |
210 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
214 o 1 d549263bcb2d 1970-01-01 00:00 +0000 test
211 | modified 1
215 | modified 1
212 |
216 |
213 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
217 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
214 added
218 added
215
219
216 Also check that it doesn't create more orphans outside of the stack
220 Also check that it doesn't create more orphans outside of the stack
217
221
218 $ hg up -q 1
222 $ hg up -q 1
219 $ echo "mod3" > file1.txt
223 $ echo "mod3" > file1.txt
220 $ hg ci -m 'modified 3'
224 $ hg ci -m 'modified 3'
221 created new head
225 created new head
222 $ hg up -q 3
226 $ hg up -q 3
223 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
227 $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
224 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
228 2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
225 D8434 - created - d549263bcb2d: modified 1
229 D8434 - created - d549263bcb2d: modified 1
226 D8433 - updated - 2b4aa8a88d61: modified 2
230 D8433 - updated - 2b4aa8a88d61: modified 2
227 new commits: ['876a60d024de']
231 new commits: ['876a60d024de']
228 new commits: ['0c6523cb1d0f']
232 new commits: ['0c6523cb1d0f']
229 restabilizing 1eda4bf55021 as d2c78c3a3e01
233 restabilizing 1eda4bf55021 as d2c78c3a3e01
230 $ hg log -G -T compact
234 $ hg log -G -T compact
231 o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
235 o 7[tip]:5 d2c78c3a3e01 1970-01-01 00:00 +0000 test
232 | modified 3
236 | modified 3
233 |
237 |
234 | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
238 | @ 6 0c6523cb1d0f 1970-01-01 00:00 +0000 test
235 |/ modified 2
239 |/ modified 2
236 |
240 |
237 o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
241 o 5:0 876a60d024de 1970-01-01 00:00 +0000 test
238 | modified 1
242 | modified 1
239 |
243 |
240 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
244 o 0 5cbade24e0fa 1970-01-01 00:00 +0000 test
241 added
245 added
242
246
Posting obsolete commits is disallowed

$ echo "mod3" > file1.txt
$ hg ci -m 'modified A'
$ echo "mod4" > file1.txt
$ hg ci -m 'modified B'

$ hg up '.^'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 'obsolete' > file1.txt
$ hg amend --config extensions.amend=
1 new orphan changesets
$ hg log -G
@ changeset: 10:082be6c94150
| tag: tip
| parent: 6:0c6523cb1d0f
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified A
|
| * changeset: 9:a67643f48146
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | instability: orphan
| | summary: modified B
| |
| x changeset: 8:db79727cb2f7
|/ parent: 6:0c6523cb1d0f
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| obsolete: rewritten using amend as 10:082be6c94150
| summary: modified A
|
| o changeset: 7:d2c78c3a3e01
| | parent: 5:876a60d024de
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | summary: modified 3
| |
o | changeset: 6:0c6523cb1d0f
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified 2
|
o changeset: 5:876a60d024de
| parent: 0:5cbade24e0fa
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified 1
|
o changeset: 0:5cbade24e0fa
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: added

$ hg phabsend -r 5::
abort: obsolete commits cannot be posted for review
[255]

Don't restack existing orphans

$ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
876a60d024de mapped to old nodes ['876a60d024de']
0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
D8434 - updated - 876a60d024de: modified 1
D8433 - updated - 0c6523cb1d0f: modified 2
D8435 - created - 082be6c94150: modified A
new commits: ['b5913193c805']
not restabilizing unchanged d2c78c3a3e01
$ hg log -G
@ changeset: 11:b5913193c805
| tag: tip
| parent: 6:0c6523cb1d0f
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified A
|
| * changeset: 9:a67643f48146
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | instability: orphan
| | summary: modified B
| |
| x changeset: 8:db79727cb2f7
|/ parent: 6:0c6523cb1d0f
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| obsolete: rewritten using amend, phabsend as 11:b5913193c805
| summary: modified A
|
| o changeset: 7:d2c78c3a3e01
| | parent: 5:876a60d024de
| | user: test
| | date: Thu Jan 01 00:00:00 1970 +0000
| | summary: modified 3
| |
o | changeset: 6:0c6523cb1d0f
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified 2
|
o changeset: 5:876a60d024de
| parent: 0:5cbade24e0fa
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: modified 1
|
o changeset: 0:5cbade24e0fa
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: added

$ cd ..

Phabsending a new binary, a modified binary, and a removed binary

>>> open('bin', 'wb').write(b'\0a') and None
$ hg ci -Am 'add binary'
adding bin
>>> open('bin', 'wb').write(b'\0b') and None
$ hg ci -m 'modify binary'
$ hg rm bin
$ hg ci -m 'remove binary'
$ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
uploading bin@aa24a81f55de
D8007 - created - aa24a81f55de: add binary
uploading bin@d8d62a881b54
D8008 - created - d8d62a881b54: modify binary
D8009 - created - af55645b2e29: remove binary
new commits: ['b8139fbb4a57']
new commits: ['c88ce4c2d2ad']
new commits: ['75dbbc901145']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/aa24a81f55de-a3a0cf24-phabsend.hg

Phabsend a renamed binary and a copied binary, with and without content changes
to src and dest

>>> open('bin2', 'wb').write(b'\0c') and None
$ hg ci -Am 'add another binary'
adding bin2

TODO: "bin2" can't be viewed in this commit (left or right side), and the URL
looks much different than when viewing "bin2_moved". No idea if this is a phab
bug or a phabsend bug. The patch (as printed by phabread) looks reasonable,
though.

$ hg mv bin2 bin2_moved
$ hg ci -m "moved binary"

Note: "bin2_moved" is also not viewable in phabricator with this review

$ hg cp bin2_moved bin2_copied
$ hg ci -m "copied binary"

Note: "bin2_moved_again" is marked binary in phabricator, and both sides of it
are viewable in their proper state. "bin2_copied" is not viewable, and not
listed as binary in phabricator.

>>> open('bin2_copied', 'wb').write(b'\0move+mod') and None
$ hg mv bin2_copied bin2_moved_again
$ hg ci -m "move+mod copied binary"

Note: "bin2_moved" and "bin2_moved_copied" are both marked binary, and both
viewable on each side.

>>> open('bin2_moved', 'wb').write(b'\0precopy mod') and None
$ hg cp bin2_moved bin2_moved_copied
>>> open('bin2_moved', 'wb').write(b'\0copy src+mod') and None
$ hg ci -m "copy+mod moved binary"

$ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
uploading bin2@f42f9195e00c
D8128 - created - f42f9195e00c: add another binary
D8129 - created - 834ab31d80ae: moved binary
D8130 - created - 494b750e5194: copied binary
uploading bin2_moved_again@25f766b50cc2
D8131 - created - 25f766b50cc2: move+mod copied binary
uploading bin2_moved_copied@1b87b363a5e4
uploading bin2_moved@1b87b363a5e4
D8132 - created - 1b87b363a5e4: copy+mod moved binary
new commits: ['90437c20312a']
new commits: ['f391f4da4c61']
new commits: ['da86a9f3268c']
new commits: ['003ffc16ba66']
new commits: ['13bd750c36fa']
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f42f9195e00c-e82a0769-phabsend.hg

Phabreading a DREV with a local:commits time as a string:
$ hg phabread --test-vcr "$VCR/phabread-str-time.json" D1285
# HG changeset patch
# User Pulkit Goyal <7895pulkit@gmail.com>
# Date 1509404054 -19800
# Node ID 44fc1c1f1774a76423b9c732af6938435099bcc5
# Parent 8feef8ef8389a3b544e0a74624f1efc3a8d85d35
repoview: add a new attribute _visibilityexceptions and related API

Currently we don't have a defined way in core to make some hidden revisions
visible in filtered repo. Extensions to achieve the purpose of unhiding some
hidden commits, wrap repoview.pinnedrevs() function.

To make the above task simple and have well defined API, this patch adds a new
attribute '_visibilityexceptions' to repoview class which will contains
the hidden revs which should be exception.
This will allow to set different exceptions for different repoview objects
backed by the same unfiltered repo.

This patch also adds API to add revs to the attribute set and get them.

Thanks to Jun for suggesting the use of repoview class instead of localrepo.

Differential Revision: https://phab.mercurial-scm.org/D1285
diff --git a/mercurial/repoview.py b/mercurial/repoview.py
--- a/mercurial/repoview.py
+++ b/mercurial/repoview.py
@@ * @@ (glob)
subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`.
"""

+ # hidden revs which should be visible
+ _visibilityexceptions = set()
+
def __init__(self, repo, filtername):
object.__setattr__(self, r'_unfilteredrepo', repo)
object.__setattr__(self, r'filtername', filtername)
@@ -231,6 +234,14 @@
return self
return self.unfiltered().filtered(name)

+ def addvisibilityexceptions(self, revs):
+ """adds hidden revs which should be visible to set of exceptions"""
+ self._visibilityexceptions.update(revs)
+
+ def getvisibilityexceptions(self):
+ """returns the set of hidden revs which should be visible"""
+ return self._visibilityexceptions
+
# everything access are forwarded to the proxied repo
def __getattr__(self, attr):
return getattr(self._unfilteredrepo, attr)
diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
--- a/mercurial/localrepo.py
+++ b/mercurial/localrepo.py
@@ -570,6 +570,14 @@
def close(self):
self._writecaches()

+ def addvisibilityexceptions(self, exceptions):
+ # should be called on a filtered repository
+ pass
+
+ def getvisibilityexceptions(self):
+ # should be called on a filtered repository
+ return set()
+
def _loadextensions(self):
extensions.loadall(self.ui)


A bad .arcconfig doesn't error out
$ echo 'garbage' > .arcconfig
$ hg config phabricator --debug
invalid JSON in $TESTTMP/repo/.arcconfig
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=https://phab.mercurial-scm.org/ (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=HG (glob)

The .arcconfig content overrides global config
$ cat >> $HGRCPATH << EOF
> [phabricator]
> url = global
> callsign = global
> EOF
$ cp $TESTDIR/../.arcconfig .
$ mv .hg/hgrc .hg/hgrc.bak
$ hg config phabricator --debug
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.arcconfig: phabricator.callsign=HG
$TESTTMP/repo/.arcconfig: phabricator.url=https://phab.mercurial-scm.org/

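A note on the mapping above (editorial, not part of the recorded test output):
the extension reads the repository's .arcconfig and reports its values under the
[phabricator] config section, which is why the callsign and url above are shown
as coming from $TESTTMP/repo/.arcconfig. A minimal .arcconfig carrying the same
information might look roughly like the sketch below; the exact key names are an
assumption here, not something this transcript verifies:

{
    "phabricator.uri": "https://phab.mercurial-scm.org/",
    "repository.callsign": "HG"
}
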
But it doesn't override local config
$ cat >> .hg/hgrc << EOF
> [phabricator]
> url = local
> callsign = local
> EOF
$ hg config phabricator --debug
read config from: */.hgrc (glob)
*/.hgrc:*: phabricator.debug=True (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.url=local (glob)
$TESTTMP/repo/.hg/hgrc:*: phabricator.callsign=local (glob)
$ mv .hg/hgrc.bak .hg/hgrc

Phabimport works with a stack

$ cd ..
$ hg clone repo repo2 -qr 1
$ cp repo/.hg/hgrc repo2/.hg/
$ cd repo2
$ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json"
applying patch from D7917
applying patch from D7918
$ hg log -r .: -G -Tcompact
o 3[tip] aaef04066140 1970-01-01 00:00 +0000 test
| create draft change for phabricator testing
|
o 2 8de3712202d1 1970-01-01 00:00 +0000 test
| create public change for phabricator testing
|
@ 1 a692622e6937 1970-01-01 00:00 +0000 test
| create beta for phabricator test
~
Phabimport can create secret commits

$ hg rollback --config ui.rollback=True
repository tip rolled back to revision 1 (undo phabimport)
$ hg phabimport --stack 'D7918' --test-vcr "$VCR/phabimport-stack.json" \
> --config phabimport.secret=True
applying patch from D7917
applying patch from D7918
$ hg log -r 'reverse(.:)' -T phases
changeset: 3:aaef04066140
tag: tip
phase: secret
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create draft change for phabricator testing

changeset: 2:8de3712202d1
phase: secret
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create public change for phabricator testing

changeset: 1:a692622e6937
phase: public
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: create beta for phabricator test

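A note on the option used above (editorial, not part of the recorded test
output): the --config phabimport.secret=True override suggests the same
behaviour can be made persistent in an hgrc; only the section and option name
are taken from the command line above, the idea of placing it in an hgrc is an
assumption:

[phabimport]
secret = True
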
Phabimport accepts multiple DREVSPECs

$ hg rollback --config ui.rollback=True
repository tip rolled back to revision 1 (undo phabimport)
$ hg phabimport --no-stack D7917 D7918 --test-vcr "$VCR/phabimport-multi-drev.json"
applying patch from D7917
applying patch from D7918

Phabsend requires a linear range of commits

$ hg phabsend -r 0+2+3
abort: cannot phabsend multiple head revisions: c44b38f24a45
(the revisions must form a linear chain)
[255]

Validate arguments with --fold

$ hg phabsend --fold -r 1
abort: cannot fold a single revision
[255]
$ hg phabsend --fold --no-amend -r 1::
abort: cannot fold with --no-amend
[255]
$ hg phabsend --fold -r 1::
abort: cannot fold revisions with different DREV values
[255]

Set up a series of commits to be folded, and include the Test Plan field multiple
times to test the concatenation logic. No Test Plan field in the last one, to
ensure missing fields are skipped.

$ hg init ../folded
$ cd ../folded
$ cat >> .hg/hgrc <<EOF
> [phabricator]
> url = https://phab.mercurial-scm.org/
> callsign = HG
> EOF

$ echo 'added' > file.txt
$ hg ci -Aqm 'added file'

$ cat > log.txt <<EOF
> one: first commit to review
>
> This file was modified with 'mod1' as its contents.
>
> Test Plan:
> LOL! What testing?!
> EOF
$ echo mod1 > file.txt
$ hg ci -l log.txt

$ cat > log.txt <<EOF
> two: second commit to review
>
> This file was modified with 'mod2' as its contents.
>
> Test Plan:
> Haha! yeah, right.
>
> EOF
$ echo mod2 > file.txt
$ hg ci -l log.txt

$ echo mod3 > file.txt
$ hg ci -m '3: a commit with no detailed message'

The folding of immutable commits works...

$ hg phase -r tip --public
$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
D8386 - created - a959a3f69d8d: one: first commit to review
D8386 - created - 24a4438154ba: two: second commit to review
D8386 - created - d235829e802c: 3: a commit with no detailed message
warning: not updating public commit 1:a959a3f69d8d
warning: not updating public commit 2:24a4438154ba
warning: not updating public commit 3:d235829e802c
no newnodes to update

$ hg phase -r 0 --draft --force

... as does the initial mutable fold...

$ echo y | hg phabsend --fold --confirm -r 1:: \
> --test-vcr "$VCR/phabsend-fold-initial.json"
NEW - a959a3f69d8d: one: first commit to review
NEW - 24a4438154ba: two: second commit to review
NEW - d235829e802c: 3: a commit with no detailed message
Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
D8387 - created - a959a3f69d8d: one: first commit to review
D8387 - created - 24a4438154ba: two: second commit to review
D8387 - created - d235829e802c: 3: a commit with no detailed message
updating local commit list for D8387
new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg

... and doesn't mangle the local commits.

$ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
3:921f8265efbd
3: a commit with no detailed message

Differential Revision: https://phab.mercurial-scm.org/D8387
2:832553266fe8
two: second commit to review

This file was modified with 'mod2' as its contents.

Test Plan:
Haha! yeah, right.

Differential Revision: https://phab.mercurial-scm.org/D8387
1:602c4e738243
one: first commit to review

This file was modified with 'mod1' as its contents.

Test Plan:
LOL! What testing?!

Differential Revision: https://phab.mercurial-scm.org/D8387
0:98d480e0d494
added file

Set up some obsmarkers by adding a file to the middle commit. This stress tests
getoldnodedrevmap() in later phabsends.

$ hg up '.^'
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 'modified' > file2.txt
$ hg add file2.txt
$ hg amend --config experimental.evolution=all --config extensions.amend=
1 new orphan changesets
$ hg up 3
obsolete feature not enabled but 1 markers found!
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg rebase --config experimental.evolution=all --config extensions.rebase=
note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
rebasing 3:921f8265efbd "3: a commit with no detailed message"

When commits have changed locally, the local commit list on Phabricator is
updated.

$ echo y | hg phabsend --fold --confirm -r 1:: \
> --test-vcr "$VCR/phabsend-fold-updated.json"
obsolete feature not enabled but 2 markers found!
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['832553266fe8']
e4edb1fe3565 mapped to old nodes ['921f8265efbd']
D8387 - 602c4e738243: one: first commit to review
D8387 - 0124e5474c88: two: second commit to review
D8387 - e4edb1fe3565: 3: a commit with no detailed message
Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
obsolete feature not enabled but 2 markers found! (?)
updating local commit list for D8387
new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
$ hg log -Tcompact
obsolete feature not enabled but 2 markers found!
5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
3: a commit with no detailed message

4:1 0124e5474c88 1970-01-01 00:00 +0000 test
two: second commit to review

1 602c4e738243 1970-01-01 00:00 +0000 test
one: first commit to review

0 98d480e0d494 1970-01-01 00:00 +0000 test
added file

When nothing has changed locally since the last phabsend, the commit list isn't
updated, and nothing is changed locally afterward.

$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-no-changes.json"
obsolete feature not enabled but 2 markers found!
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
obsolete feature not enabled but 2 markers found! (?)
local commit list for D8387 is already up-to-date
$ hg log -Tcompact
obsolete feature not enabled but 2 markers found!
5[tip] e4edb1fe3565 1970-01-01 00:00 +0000 test
3: a commit with no detailed message

4:1 0124e5474c88 1970-01-01 00:00 +0000 test
two: second commit to review

1 602c4e738243 1970-01-01 00:00 +0000 test
one: first commit to review

0 98d480e0d494 1970-01-01 00:00 +0000 test
added file

Fold will accept new revisions at the end...

$ echo 'another mod' > file2.txt
$ hg ci -m 'four: extend the fold range'
obsolete feature not enabled but 2 markers found!
$ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-extend-end.json" \
> --config experimental.evolution=all
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
D8387 - updated - 602c4e738243: one: first commit to review
D8387 - updated - 0124e5474c88: two: second commit to review
D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
D8387 - created - 94aaae213b23: four: extend the fold range
updating local commit list for D8387
new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
$ hg log -r . -T '{desc}\n'
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8387
$ hg log -T'{rev} {if(phabreview, "{phabreview.url} {phabreview.id}")}\n' -r 1::
obsolete feature not enabled but 3 markers found!
1 https://phab.mercurial-scm.org/D8387 D8387
4 https://phab.mercurial-scm.org/D8387 D8387
5 https://phab.mercurial-scm.org/D8387 D8387
7 https://phab.mercurial-scm.org/D8387 D8387

... and also accepts new revisions at the beginning of the range

It's a bit unfortunate that not having a Differential URL on the first commit
causes a new Differential Revision to be created, though it isn't *entirely*
unreasonable. At least this updates the subsequent commits.

TODO: See if it can reuse the existing Differential.

$ hg phabsend --fold -r 0:: --test-vcr "$VCR/phabsend-fold-extend-front.json" \
> --config experimental.evolution=all
602c4e738243 mapped to old nodes ['602c4e738243']
0124e5474c88 mapped to old nodes ['0124e5474c88']
e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
51a04fea8707 mapped to old nodes ['51a04fea8707']
D8388 - created - 98d480e0d494: added file
D8388 - updated - 602c4e738243: one: first commit to review
D8388 - updated - 0124e5474c88: two: second commit to review
D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
D8388 - updated - 51a04fea8707: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']

$ hg log -T '{rev}:{node|short}\n{indent(desc, " ")}\n'
obsolete feature not enabled but 8 markers found!
12:ac7db67f0991
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8388
11:30682b960804
3: a commit with no detailed message

Differential Revision: https://phab.mercurial-scm.org/D8388
10:3ee132d41dbc
two: second commit to review

This file was modified with 'mod2' as its contents.

Test Plan:
Haha! yeah, right.

Differential Revision: https://phab.mercurial-scm.org/D8388
9:6320b7d714cf
one: first commit to review

This file was modified with 'mod1' as its contents.

Test Plan:
LOL! What testing?!

Differential Revision: https://phab.mercurial-scm.org/D8388
8:15e9b14b4b4c
added file

Differential Revision: https://phab.mercurial-scm.org/D8388

Test phabsend --fold with an `hg split` at the end of the range

$ echo foo > file3.txt
$ hg add file3.txt

$ hg log -r . -T '{desc}' > log.txt
$ echo 'amended mod' > file2.txt
$ hg ci --amend -l log.txt --config experimental.evolution=all

$ cat <<EOF | hg --config extensions.split= --config ui.interactive=True \
> --config experimental.evolution=all split -r .
> n
> y
> y
> y
> y
> EOF
diff --git a/file2.txt b/file2.txt
1 hunks, 1 lines changed
examine changes to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] n

diff --git a/file3.txt b/file3.txt
new file mode 100644
examine changes to 'file3.txt'?
(enter ? for help) [Ynesfdaq?] y

@@ -0,0 +1,1 @@
+foo
record change 2/2 to 'file3.txt'?
(enter ? for help) [Ynesfdaq?] y

created new head
diff --git a/file2.txt b/file2.txt
1 hunks, 1 lines changed
examine changes to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] y

@@ -1,1 +1,1 @@
-modified
+amended mod
record this change to 'file2.txt'?
(enter ? for help) [Ynesfdaq?] y

$ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-split-end.json" \
> --config experimental.evolution=all
15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
6320b7d714cf mapped to old nodes ['6320b7d714cf']
3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
30682b960804 mapped to old nodes ['30682b960804']
6bc15dc99efd mapped to old nodes ['ac7db67f0991']
b50946d5e490 mapped to old nodes ['ac7db67f0991']
D8388 - updated - 15e9b14b4b4c: added file
D8388 - updated - 6320b7d714cf: one: first commit to review
D8388 - updated - 3ee132d41dbc: two: second commit to review
D8388 - updated - 30682b960804: 3: a commit with no detailed message
D8388 - updated - 6bc15dc99efd: four: extend the fold range
D8388 - updated - b50946d5e490: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']

Test phabsend --fold with an `hg fold` at the end of the range

$ hg --config experimental.evolution=all --config extensions.rebase= \
> rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
rebasing 14:6bc15dc99efd "four: extend the fold range"
rebasing 15:b50946d5e490 "four: extend the fold range" (tip)

$ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
> --config experimental.evolution=all
15e9b14b4b4c mapped to old nodes ['15e9b14b4b4c']
6320b7d714cf mapped to old nodes ['6320b7d714cf']
3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
30682b960804 mapped to old nodes ['30682b960804']
e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
D8388 - updated - 15e9b14b4b4c: added file
D8388 - updated - 6320b7d714cf: one: first commit to review
D8388 - updated - 3ee132d41dbc: two: second commit to review
D8388 - updated - 30682b960804: 3: a commit with no detailed message
D8388 - updated - e919cdf3d4fe: four: extend the fold range
updating local commit list for D8388
new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']

$ hg log -r tip -v
obsolete feature not enabled but 12 markers found!
changeset: 16:e919cdf3d4fe
tag: tip
parent: 11:30682b960804
user: test
date: Thu Jan 01 00:00:00 1970 +0000
files: file2.txt file3.txt
description:
four: extend the fold range

Differential Revision: https://phab.mercurial-scm.org/D8388



$ cd ..