# NOTE(review): scraped page header condensed to a comment — this file is
# hgext/bugzilla.py as of Mercurial commit r35972:386c1e45 (default branch),
# "logcmdutil: drop default arguments from changesetdisplayer/templater()
# calls", by Yuya Nishihara.
# bugzilla.py - bugzilla integration for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
# Copyright 2011-4 Jim Hague <jim.hague@acm.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''hooks for integrating with the Bugzilla bug tracker

This hook extension adds comments on bugs in Bugzilla when changesets
that refer to bugs by Bugzilla ID are seen. The comment is formatted using
the Mercurial template mechanism.

The bug references can optionally include an update for Bugzilla of the
hours spent working on the bug. Bugs can also be marked fixed.

Four basic modes of access to Bugzilla are provided:

1. Access via the Bugzilla REST-API. Requires bugzilla 5.0 or later.

2. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.

3. Check data via the Bugzilla XMLRPC interface and submit bug change
   via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.

4. Writing directly to the Bugzilla database. Only Bugzilla installations
   using MySQL are supported. Requires Python MySQLdb.

Writing directly to the database is susceptible to schema changes, and
relies on a Bugzilla contrib script to send out bug change
notification emails. This script runs as the user running Mercurial,
must be run on the host with the Bugzilla install, and requires
permission to read Bugzilla configuration details and the necessary
MySQL user and password to have full access rights to the Bugzilla
database. For these reasons this access mode is now considered
deprecated, and will not be updated for new Bugzilla versions going
forward. Only adding comments is supported in this access mode.

Access via XMLRPC needs a Bugzilla username and password to be specified
in the configuration. Comments are added under that username. Since the
configuration must be readable by all Mercurial users, it is recommended
that the rights of that user are restricted in Bugzilla to the minimum
necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.

Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
email to the Bugzilla email interface to submit comments to bugs.
The From: address in the email is set to the email address of the Mercurial
user, so the comment appears to come from the Mercurial user. In the event
that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
user, the email associated with the Bugzilla username used to log into
Bugzilla is used instead as the source of the comment. Marking bugs fixed
works on all supported Bugzilla versions.

Access via the REST-API needs either a Bugzilla username and password
or an apikey specified in the configuration. Comments are made under
the given username or the user associated with the apikey in Bugzilla.

Configuration items common to all access modes:

bugzilla.version
  The access type to use. Values recognized are:

  :``restapi``:      Bugzilla REST-API, Bugzilla 5.0 and later.
  :``xmlrpc``:       Bugzilla XMLRPC interface.
  :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
  :``3.0``:          MySQL access, Bugzilla 3.0 and later.
  :``2.18``:         MySQL access, Bugzilla 2.18 and up to but not
                     including 3.0.
  :``2.16``:         MySQL access, Bugzilla 2.16 and up to but not
                     including 2.18.

bugzilla.regexp
  Regular expression to match bug IDs for update in changeset commit message.
  It must contain one "()" named group ``<ids>`` containing the bug
  IDs separated by non-digit characters. It may also contain
  a named group ``<hours>`` with a floating-point number giving the
  hours worked on the bug. If no named groups are present, the first
  "()" group is assumed to contain the bug IDs, and work time is not
  updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
  ``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
  variations thereof, followed by an hours number prefixed by ``h`` or
  ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.

bugzilla.fixregexp
  Regular expression to match bug IDs for marking fixed in changeset
  commit message. This must contain a "()" named group ``<ids>`` containing
  the bug IDs separated by non-digit characters. It may also contain
  a named group ``<hours>`` with a floating-point number giving the
  hours worked on the bug. If no named groups are present, the first
  "()" group is assumed to contain the bug IDs, and work time is not
  updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
  ``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
  variations thereof, followed by an hours number prefixed by ``h`` or
  ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.

bugzilla.fixstatus
  The status to set a bug to when marking fixed. Default ``RESOLVED``.

bugzilla.fixresolution
  The resolution to set a bug to when marking fixed. Default ``FIXED``.

bugzilla.style
  The style file to use when formatting comments.

bugzilla.template
  Template to use when formatting comments. Overrides style if
  specified. In addition to the usual Mercurial keywords, the
  extension specifies:

  :``{bug}``:     The Bugzilla bug ID.
  :``{root}``:    The full pathname of the Mercurial repository.
  :``{webroot}``: Stripped pathname of the Mercurial repository.
  :``{hgweb}``:   Base URL for browsing Mercurial repositories.

  Default ``changeset {node|short} in repo {root} refers to bug
  {bug}.\\ndetails:\\n\\t{desc|tabindent}``

bugzilla.strip
  The number of path separator characters to strip from the front of
  the Mercurial repository path (``{root}`` in templates) to produce
  ``{webroot}``. For example, a repository with ``{root}``
  ``/var/local/my-project`` with a strip of 2 gives a value for
  ``{webroot}`` of ``my-project``. Default 0.

web.baseurl
  Base URL for browsing Mercurial repositories. Referenced from
  templates as ``{hgweb}``.

Configuration items common to XMLRPC+email and MySQL access modes:

bugzilla.usermap
  Path of file containing Mercurial committer email to Bugzilla user email
  mappings. If specified, the file should contain one mapping per
  line::

    committer = Bugzilla user

  See also the ``[usermap]`` section.

The ``[usermap]`` section is used to specify mappings of Mercurial
committer email to Bugzilla user email. See also ``bugzilla.usermap``.
Contains entries of the form ``committer = Bugzilla user``.

XMLRPC and REST-API access mode configuration:

bugzilla.bzurl
  The base URL for the Bugzilla installation.
  Default ``http://localhost/bugzilla``.

bugzilla.user
  The username to use to log into Bugzilla via XMLRPC. Default
  ``bugs``.

bugzilla.password
  The password for Bugzilla login.

REST-API access mode uses the options listed above as well as:

bugzilla.apikey
  An apikey generated on the Bugzilla instance for api access.
  Using an apikey removes the need to store the user and password
  options.

XMLRPC+email access mode uses the XMLRPC access mode configuration items,
and also:

bugzilla.bzemail
  The Bugzilla email address.

In addition, the Mercurial email settings must be configured. See the
documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.

MySQL access mode configuration:

bugzilla.host
  Hostname of the MySQL server holding the Bugzilla database.
  Default ``localhost``.

bugzilla.db
  Name of the Bugzilla database in MySQL. Default ``bugs``.

bugzilla.user
  Username to use to access MySQL server. Default ``bugs``.

bugzilla.password
  Password to use to access MySQL server.

bugzilla.timeout
  Database connection timeout (seconds). Default 5.

bugzilla.bzuser
  Fallback Bugzilla user name to record comments with, if changeset
  committer cannot be found as a Bugzilla user.

bugzilla.bzdir
  Bugzilla install directory. Used by default notify. Default
  ``/var/www/html/bugzilla``.

bugzilla.notify
  The command to run to get Bugzilla to send bug change notification
  emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
  id) and ``user`` (committer bugzilla email). Default depends on
  version; from 2.18 it is "cd %(bzdir)s && perl -T
  contrib/sendbugmail.pl %(id)s %(user)s".

Activating the extension::

    [extensions]
    bugzilla =

    [hooks]
    # run bugzilla hook on every change pulled or pushed in here
    incoming.bugzilla = python:hgext.bugzilla.hook

Example configurations:

XMLRPC example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::

    [bugzilla]
    bzurl=http://my-project.org/bugzilla
    user=bugmail@my-project.org
    password=plugh
    version=xmlrpc
    template=Changeset {node|short} in {root|basename}.
             {hgweb}/{webroot}/rev/{node|short}\\n
             {desc}\\n
    strip=5

    [web]
    baseurl=http://my-project.org/hg

XMLRPC+email example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. Bug comments
are sent to the Bugzilla email address
``bugzilla@my-project.org``. ::

    [bugzilla]
    bzurl=http://my-project.org/bugzilla
    user=bugmail@my-project.org
    password=plugh
    version=xmlrpc+email
    bzemail=bugzilla@my-project.org
    template=Changeset {node|short} in {root|basename}.
             {hgweb}/{webroot}/rev/{node|short}\\n
             {desc}\\n
    strip=5

    [web]
    baseurl=http://my-project.org/hg

    [usermap]
    user@emaildomain.com=user.name@bugzilladomain.com

MySQL example configuration. This has a local Bugzilla 3.2 installation
in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
the Bugzilla database name is ``bugs`` and MySQL is
accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::

    [bugzilla]
    host=localhost
    password=XYZZY
    version=3.0
    bzuser=unknown@domain.com
    bzdir=/opt/bugzilla-3.2
    template=Changeset {node|short} in {root|basename}.
             {hgweb}/{webroot}/rev/{node|short}\\n
             {desc}\\n
    strip=5

    [web]
    baseurl=http://my-project.org/hg

    [usermap]
    user@emaildomain.com=user.name@bugzilladomain.com

All the above add a comment to the Bugzilla bug record of the form::

    Changeset 3b16791d6642 in repository-name.
    http://my-project.org/hg/repository-name/rev/3b16791d6642

    Changeset commit comment. Bug 1234.
'''
294 from __future__ import absolute_import
294 from __future__ import absolute_import
295
295
296 import json
296 import json
297 import re
297 import re
298 import time
298 import time
299
299
300 from mercurial.i18n import _
300 from mercurial.i18n import _
301 from mercurial.node import short
301 from mercurial.node import short
302 from mercurial import (
302 from mercurial import (
303 error,
303 error,
304 logcmdutil,
304 logcmdutil,
305 mail,
305 mail,
306 registrar,
306 registrar,
307 url,
307 url,
308 util,
308 util,
309 )
309 )
310
310
311 xmlrpclib = util.xmlrpclib
311 xmlrpclib = util.xmlrpclib
312
312
313 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
313 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
314 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
314 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
315 # be specifying the version(s) of Mercurial they are tested with, or
315 # be specifying the version(s) of Mercurial they are tested with, or
316 # leave the attribute unspecified.
316 # leave the attribute unspecified.
317 testedwith = 'ships-with-hg-core'
317 testedwith = 'ships-with-hg-core'
318
318
319 configtable = {}
319 configtable = {}
320 configitem = registrar.configitem(configtable)
320 configitem = registrar.configitem(configtable)
321
321
322 configitem('bugzilla', 'apikey',
322 configitem('bugzilla', 'apikey',
323 default='',
323 default='',
324 )
324 )
325 configitem('bugzilla', 'bzdir',
325 configitem('bugzilla', 'bzdir',
326 default='/var/www/html/bugzilla',
326 default='/var/www/html/bugzilla',
327 )
327 )
328 configitem('bugzilla', 'bzemail',
328 configitem('bugzilla', 'bzemail',
329 default=None,
329 default=None,
330 )
330 )
331 configitem('bugzilla', 'bzurl',
331 configitem('bugzilla', 'bzurl',
332 default='http://localhost/bugzilla/',
332 default='http://localhost/bugzilla/',
333 )
333 )
334 configitem('bugzilla', 'bzuser',
334 configitem('bugzilla', 'bzuser',
335 default=None,
335 default=None,
336 )
336 )
337 configitem('bugzilla', 'db',
337 configitem('bugzilla', 'db',
338 default='bugs',
338 default='bugs',
339 )
339 )
340 configitem('bugzilla', 'fixregexp',
340 configitem('bugzilla', 'fixregexp',
341 default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
341 default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
342 r'(?:nos?\.?|num(?:ber)?s?)?\s*'
342 r'(?:nos?\.?|num(?:ber)?s?)?\s*'
343 r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
343 r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
344 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
344 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
345 )
345 )
346 configitem('bugzilla', 'fixresolution',
346 configitem('bugzilla', 'fixresolution',
347 default='FIXED',
347 default='FIXED',
348 )
348 )
349 configitem('bugzilla', 'fixstatus',
349 configitem('bugzilla', 'fixstatus',
350 default='RESOLVED',
350 default='RESOLVED',
351 )
351 )
352 configitem('bugzilla', 'host',
352 configitem('bugzilla', 'host',
353 default='localhost',
353 default='localhost',
354 )
354 )
355 configitem('bugzilla', 'notify',
355 configitem('bugzilla', 'notify',
356 default=configitem.dynamicdefault,
356 default=configitem.dynamicdefault,
357 )
357 )
358 configitem('bugzilla', 'password',
358 configitem('bugzilla', 'password',
359 default=None,
359 default=None,
360 )
360 )
361 configitem('bugzilla', 'regexp',
361 configitem('bugzilla', 'regexp',
362 default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
362 default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
363 r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
363 r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
364 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
364 r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
365 )
365 )
366 configitem('bugzilla', 'strip',
366 configitem('bugzilla', 'strip',
367 default=0,
367 default=0,
368 )
368 )
369 configitem('bugzilla', 'style',
369 configitem('bugzilla', 'style',
370 default=None,
370 default=None,
371 )
371 )
372 configitem('bugzilla', 'template',
372 configitem('bugzilla', 'template',
373 default=None,
373 default=None,
374 )
374 )
375 configitem('bugzilla', 'timeout',
375 configitem('bugzilla', 'timeout',
376 default=5,
376 default=5,
377 )
377 )
378 configitem('bugzilla', 'user',
378 configitem('bugzilla', 'user',
379 default='bugs',
379 default='bugs',
380 )
380 )
381 configitem('bugzilla', 'usermap',
381 configitem('bugzilla', 'usermap',
382 default=None,
382 default=None,
383 )
383 )
384 configitem('bugzilla', 'version',
384 configitem('bugzilla', 'version',
385 default=None,
385 default=None,
386 )
386 )
387
387
class bzaccess(object):
    '''Base class for access to Bugzilla.

    Subclasses implement one concrete access mode (MySQL, XMLRPC,
    XMLRPC+email or REST-API). This base class only handles the
    committer-to-Bugzilla-user mapping shared by all modes.
    '''

    def __init__(self, ui):
        self.ui = ui
        # Merge the optional external usermap file into the [usermap]
        # config section so map_committer sees a single source of truth.
        usermap = self.ui.config('bugzilla', 'usermap')
        if usermap:
            self.ui.readconfig(usermap, sections=['usermap'])

    def map_committer(self, user):
        '''map name of committer to Bugzilla user name.

        Matching is case-insensitive; an unmapped committer is returned
        unchanged.
        '''
        for committer, bzuser in self.ui.configitems('usermap'):
            if committer.lower() == user.lower():
                return bzuser
        return user

    # Methods to be implemented by access classes.
    #
    # 'bugs' is a dict keyed on bug id, where values are a dict holding
    # updates to bug state. Recognized dict keys are:
    #
    # 'hours': Value, float containing work hours to be updated.
    # 'fix':   If key present, bug is to be marked fixed. Value ignored.

    def filter_real_bug_ids(self, bugs):
        '''remove bug IDs that do not exist in Bugzilla from bugs.'''

    def filter_cset_known_bug_ids(self, node, bugs):
        '''remove bug IDs where node occurs in comment text from bugs.'''

    def updatebug(self, bugid, newstate, text, committer):
        '''update the specified bug. Add comment text and set new states.

        If possible add the comment as being from the committer of
        the changeset. Otherwise use the default Bugzilla user.
        '''

    def notify(self, bugs, committer):
        '''Force sending of Bugzilla notification emails.

        Only required if the access method does not trigger notification
        emails automatically.
        '''
# Bugzilla via direct access to MySQL database.
class bzmysql(bzaccess):
    '''Support for direct MySQL access to Bugzilla.

    The earliest Bugzilla version this is tested with is version 2.16.

    If your Bugzilla is version 3.4 or above, you are strongly
    recommended to use the XMLRPC access method instead.
    '''

    @staticmethod
    def sql_buglist(ids):
        '''return SQL-friendly list of bug ids'''
        return '(' + ','.join(map(str, ids)) + ')'

    # Lazily-bound MySQLdb module; set on first instantiation so the
    # extension can load without the MySQL bindings installed.
    _MySQLdb = None

    def __init__(self, ui):
        '''Connect to the Bugzilla MySQL database using [bugzilla] config.

        Raises error.Abort if the Python MySQLdb bindings are missing.
        '''
        try:
            import MySQLdb as mysql
            bzmysql._MySQLdb = mysql
        except ImportError as err:
            raise error.Abort(_('python mysql support not available: %s') % err)

        bzaccess.__init__(self, ui)

        host = self.ui.config('bugzilla', 'host')
        user = self.ui.config('bugzilla', 'user')
        passwd = self.ui.config('bugzilla', 'password')
        db = self.ui.config('bugzilla', 'db')
        timeout = int(self.ui.config('bugzilla', 'timeout'))
        # password is masked in the note output
        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                     (host, db, user, '*' * len(passwd)))
        self.conn = bzmysql._MySQLdb.connect(host=host,
                                             user=user, passwd=passwd,
                                             db=db,
                                             connect_timeout=timeout)
        self.cursor = self.conn.cursor()
        # id of the 'longdesc' (comment) field, resolved by subclass helper
        self.longdesc_id = self.get_longdesc_id()
        self.user_ids = {}
        self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
473
473
474 def run(self, *args, **kwargs):
474 def run(self, *args, **kwargs):
475 '''run a query.'''
475 '''run a query.'''
476 self.ui.note(_('query: %s %s\n') % (args, kwargs))
476 self.ui.note(_('query: %s %s\n') % (args, kwargs))
477 try:
477 try:
478 self.cursor.execute(*args, **kwargs)
478 self.cursor.execute(*args, **kwargs)
479 except bzmysql._MySQLdb.MySQLError:
479 except bzmysql._MySQLdb.MySQLError:
480 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
480 self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
481 raise
481 raise
482
482
483 def get_longdesc_id(self):
483 def get_longdesc_id(self):
484 '''get identity of longdesc field'''
484 '''get identity of longdesc field'''
485 self.run('select fieldid from fielddefs where name = "longdesc"')
485 self.run('select fieldid from fielddefs where name = "longdesc"')
486 ids = self.cursor.fetchall()
486 ids = self.cursor.fetchall()
487 if len(ids) != 1:
487 if len(ids) != 1:
488 raise error.Abort(_('unknown database schema'))
488 raise error.Abort(_('unknown database schema'))
489 return ids[0][0]
489 return ids[0][0]
490
490
491 def filter_real_bug_ids(self, bugs):
491 def filter_real_bug_ids(self, bugs):
492 '''filter not-existing bugs from set.'''
492 '''filter not-existing bugs from set.'''
493 self.run('select bug_id from bugs where bug_id in %s' %
493 self.run('select bug_id from bugs where bug_id in %s' %
494 bzmysql.sql_buglist(bugs.keys()))
494 bzmysql.sql_buglist(bugs.keys()))
495 existing = [id for (id,) in self.cursor.fetchall()]
495 existing = [id for (id,) in self.cursor.fetchall()]
496 for id in bugs.keys():
496 for id in bugs.keys():
497 if id not in existing:
497 if id not in existing:
498 self.ui.status(_('bug %d does not exist\n') % id)
498 self.ui.status(_('bug %d does not exist\n') % id)
499 del bugs[id]
499 del bugs[id]
500
500
501 def filter_cset_known_bug_ids(self, node, bugs):
501 def filter_cset_known_bug_ids(self, node, bugs):
502 '''filter bug ids that already refer to this changeset from set.'''
502 '''filter bug ids that already refer to this changeset from set.'''
503 self.run('''select bug_id from longdescs where
503 self.run('''select bug_id from longdescs where
504 bug_id in %s and thetext like "%%%s%%"''' %
504 bug_id in %s and thetext like "%%%s%%"''' %
505 (bzmysql.sql_buglist(bugs.keys()), short(node)))
505 (bzmysql.sql_buglist(bugs.keys()), short(node)))
506 for (id,) in self.cursor.fetchall():
506 for (id,) in self.cursor.fetchall():
507 self.ui.status(_('bug %d already knows about changeset %s\n') %
507 self.ui.status(_('bug %d already knows about changeset %s\n') %
508 (id, short(node)))
508 (id, short(node)))
509 del bugs[id]
509 del bugs[id]
510
510
511 def notify(self, bugs, committer):
511 def notify(self, bugs, committer):
512 '''tell bugzilla to send mail.'''
512 '''tell bugzilla to send mail.'''
513 self.ui.status(_('telling bugzilla to send mail:\n'))
513 self.ui.status(_('telling bugzilla to send mail:\n'))
514 (user, userid) = self.get_bugzilla_user(committer)
514 (user, userid) = self.get_bugzilla_user(committer)
515 for id in bugs.keys():
515 for id in bugs.keys():
516 self.ui.status(_(' bug %s\n') % id)
516 self.ui.status(_(' bug %s\n') % id)
517 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
517 cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
518 bzdir = self.ui.config('bugzilla', 'bzdir')
518 bzdir = self.ui.config('bugzilla', 'bzdir')
519 try:
519 try:
520 # Backwards-compatible with old notify string, which
520 # Backwards-compatible with old notify string, which
521 # took one string. This will throw with a new format
521 # took one string. This will throw with a new format
522 # string.
522 # string.
523 cmd = cmdfmt % id
523 cmd = cmdfmt % id
524 except TypeError:
524 except TypeError:
525 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
525 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
526 self.ui.note(_('running notify command %s\n') % cmd)
526 self.ui.note(_('running notify command %s\n') % cmd)
527 fp = util.popen('(%s) 2>&1' % cmd)
527 fp = util.popen('(%s) 2>&1' % cmd)
528 out = fp.read()
528 out = fp.read()
529 ret = fp.close()
529 ret = fp.close()
530 if ret:
530 if ret:
531 self.ui.warn(out)
531 self.ui.warn(out)
532 raise error.Abort(_('bugzilla notify command %s') %
532 raise error.Abort(_('bugzilla notify command %s') %
533 util.explainexit(ret)[0])
533 util.explainexit(ret)[0])
534 self.ui.status(_('done\n'))
534 self.ui.status(_('done\n'))
535
535
536 def get_user_id(self, user):
536 def get_user_id(self, user):
537 '''look up numeric bugzilla user id.'''
537 '''look up numeric bugzilla user id.'''
538 try:
538 try:
539 return self.user_ids[user]
539 return self.user_ids[user]
540 except KeyError:
540 except KeyError:
541 try:
541 try:
542 userid = int(user)
542 userid = int(user)
543 except ValueError:
543 except ValueError:
544 self.ui.note(_('looking up user %s\n') % user)
544 self.ui.note(_('looking up user %s\n') % user)
545 self.run('''select userid from profiles
545 self.run('''select userid from profiles
546 where login_name like %s''', user)
546 where login_name like %s''', user)
547 all = self.cursor.fetchall()
547 all = self.cursor.fetchall()
548 if len(all) != 1:
548 if len(all) != 1:
549 raise KeyError(user)
549 raise KeyError(user)
550 userid = int(all[0][0])
550 userid = int(all[0][0])
551 self.user_ids[user] = userid
551 self.user_ids[user] = userid
552 return userid
552 return userid
553
553
554 def get_bugzilla_user(self, committer):
554 def get_bugzilla_user(self, committer):
555 '''See if committer is a registered bugzilla user. Return
555 '''See if committer is a registered bugzilla user. Return
556 bugzilla username and userid if so. If not, return default
556 bugzilla username and userid if so. If not, return default
557 bugzilla username and userid.'''
557 bugzilla username and userid.'''
558 user = self.map_committer(committer)
558 user = self.map_committer(committer)
559 try:
559 try:
560 userid = self.get_user_id(user)
560 userid = self.get_user_id(user)
561 except KeyError:
561 except KeyError:
562 try:
562 try:
563 defaultuser = self.ui.config('bugzilla', 'bzuser')
563 defaultuser = self.ui.config('bugzilla', 'bzuser')
564 if not defaultuser:
564 if not defaultuser:
565 raise error.Abort(_('cannot find bugzilla user id for %s') %
565 raise error.Abort(_('cannot find bugzilla user id for %s') %
566 user)
566 user)
567 userid = self.get_user_id(defaultuser)
567 userid = self.get_user_id(defaultuser)
568 user = defaultuser
568 user = defaultuser
569 except KeyError:
569 except KeyError:
570 raise error.Abort(_('cannot find bugzilla user id for %s or %s')
570 raise error.Abort(_('cannot find bugzilla user id for %s or %s')
571 % (user, defaultuser))
571 % (user, defaultuser))
572 return (user, userid)
572 return (user, userid)
573
573
574 def updatebug(self, bugid, newstate, text, committer):
574 def updatebug(self, bugid, newstate, text, committer):
575 '''update bug state with comment text.
575 '''update bug state with comment text.
576
576
577 Try adding comment as committer of changeset, otherwise as
577 Try adding comment as committer of changeset, otherwise as
578 default bugzilla user.'''
578 default bugzilla user.'''
579 if len(newstate) > 0:
579 if len(newstate) > 0:
580 self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
580 self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
581
581
582 (user, userid) = self.get_bugzilla_user(committer)
582 (user, userid) = self.get_bugzilla_user(committer)
583 now = time.strftime(r'%Y-%m-%d %H:%M:%S')
583 now = time.strftime(r'%Y-%m-%d %H:%M:%S')
584 self.run('''insert into longdescs
584 self.run('''insert into longdescs
585 (bug_id, who, bug_when, thetext)
585 (bug_id, who, bug_when, thetext)
586 values (%s, %s, %s, %s)''',
586 values (%s, %s, %s, %s)''',
587 (bugid, userid, now, text))
587 (bugid, userid, now, text))
588 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
588 self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
589 values (%s, %s, %s, %s)''',
589 values (%s, %s, %s, %s)''',
590 (bugid, userid, now, self.longdesc_id))
590 (bugid, userid, now, self.longdesc_id))
591 self.conn.commit()
591 self.conn.commit()
592
592
class bzmysql_2_18(bzmysql):
    '''support for bugzilla 2.18 series.'''

    def __init__(self, ui):
        bzmysql.__init__(self, ui)
        # The 2.18 series ships the contrib sendbugmail.pl script in
        # place of ./processmail, so override the notify command template.
        self.default_notify = \
            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
600
600
class bzmysql_3_0(bzmysql_2_18):
    '''support for bugzilla 3.0 series.'''

    def __init__(self, ui):
        bzmysql_2_18.__init__(self, ui)

    def get_longdesc_id(self):
        '''get identity of longdesc field'''
        # The fielddefs key column is 'id' here, not the 'fieldid' the
        # base class queries — presumably renamed in the 3.0 schema.
        self.run('select id from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise error.Abort(_('unknown database schema'))
        return ids[0][0]
614
614
615 # Bugzilla via XMLRPC interface.
615 # Bugzilla via XMLRPC interface.
616
616
class cookietransportrequest(object):
    """A Transport request method that retains cookies over its lifetime.

    The regular xmlrpclib transports ignore cookies. Which causes
    a bit of a problem when you need a cookie-based login, as with
    the Bugzilla XMLRPC interface prior to 4.4.3.

    So this is a helper for defining a Transport which looks for
    cookies being set in responses and saves them to add to all future
    requests.

    NOTE: 'cookies' is a class attribute, so cookies accumulated here
    are shared by every transport instance in the process.
    """

    # Inspiration drawn from
    # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
    # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/

    cookies = []
    def send_cookies(self, connection):
        # Replay every cookie captured from earlier responses.
        if self.cookies:
            for cookie in self.cookies:
                connection.putheader("Cookie", cookie)

    def request(self, host, handler, request_body, verbose=0):
        '''Issue an XML-RPC request, capturing Set-Cookie headers.

        Returns the unmarshalled XML-RPC response value; raises
        xmlrpclib.ProtocolError on a non-200 HTTP status.
        '''
        self.verbose = verbose
        self.accept_gzip_encoding = False

        # issue XML-RPC request
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h)
        self.send_user_agent(h)
        self.send_content(h, request_body)

        # Deal with differences between Python 2.6 and 2.7.
        # In the former h is a HTTP(S). In the latter it's a
        # HTTP(S)Connection. Luckily, the 2.6 implementation of
        # HTTP(S) has an underlying HTTP(S)Connection, so extract
        # that and use it.
        try:
            response = h.getresponse()
        except AttributeError:
            response = h._conn.getresponse()

        # Add any cookie definitions to our list.
        for header in response.msg.getallmatchingheaders("Set-Cookie"):
            val = header.split(": ", 1)[1]
            # Keep only the name=value part; drop cookie attributes.
            cookie = val.split(";", 1)[0]
            self.cookies.append(cookie)

        if response.status != 200:
            raise xmlrpclib.ProtocolError(host + handler, response.status,
                                          response.reason, response.msg.headers)

        payload = response.read()
        parser, unmarshaller = self.getparser()
        parser.feed(payload)
        parser.close()

        return unmarshaller.close()
680
680
# The explicit calls to the underlying xmlrpclib __init__() methods are
# necessary. The xmlrpclib.Transport classes are old-style classes, and
# it turns out their __init__() doesn't get called when doing multiple
# inheritance with a new-style class.
class cookietransport(cookietransportrequest, xmlrpclib.Transport):
    # Cookie-retaining transport for plain http:// connections.
    def __init__(self, use_datetime=0):
        # Transport.__init__ only exists on some Python versions; call
        # it explicitly when present (see the comment above the class).
        if util.safehasattr(xmlrpclib.Transport, "__init__"):
            xmlrpclib.Transport.__init__(self, use_datetime)
689
689
class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
    # Cookie-retaining transport for https:// connections.
    def __init__(self, use_datetime=0):
        # Probe xmlrpclib.Transport (SafeTransport's base) for __init__;
        # when the running Python provides one, initialize SafeTransport
        # explicitly — old-style bases are not reached automatically when
        # mixed into a new-style class.
        if util.safehasattr(xmlrpclib.Transport, "__init__"):
            xmlrpclib.SafeTransport.__init__(self, use_datetime)
694
694
class bzxmlrpc(bzaccess):
    """Support for access to Bugzilla via the Bugzilla XMLRPC API.

    Requires a minimum Bugzilla version 3.4.
    """

    def __init__(self, ui):
        bzaccess.__init__(self, ui)

        bzweb = self.ui.config('bugzilla', 'bzurl')
        bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"

        user = self.ui.config('bugzilla', 'user')
        passwd = self.ui.config('bugzilla', 'password')

        self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
        self.fixresolution = self.ui.config('bugzilla', 'fixresolution')

        self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
        # Cache the server version; updatebug() chooses its API call
        # (Bug.update vs Bug.add_comment) based on the major version.
        ver = self.bzproxy.Bugzilla.version()['version'].split('.')
        self.bzvermajor = int(ver[0])
        self.bzverminor = int(ver[1])
        login = self.bzproxy.User.login({'login': user, 'password': passwd,
                                         'restrict_login': True})
        # Login token; empty string when the server returns none.
        self.bztoken = login.get('token', '')

    def transport(self, uri):
        # Pick the cookie-retaining transport matching the URL scheme.
        if util.urlreq.urlparse(uri, "http")[0] == "https":
            return cookiesafetransport()
        else:
            return cookietransport()

    def get_bug_comments(self, id):
        """Return a string with all comment text for a bug."""
        c = self.bzproxy.Bug.comments({'ids': [id],
                                       'include_fields': ['text'],
                                       'token': self.bztoken})
        return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])

    def filter_real_bug_ids(self, bugs):
        # Probe all ids in one permissive call; nonexistent ids come
        # back in probe['faults'] rather than raising.
        probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
                                      'include_fields': [],
                                      'permissive': True,
                                      'token': self.bztoken,
                                      })
        for badbug in probe['faults']:
            id = badbug['id']
            self.ui.status(_('bug %d does not exist\n') % id)
            del bugs[id]

    def filter_cset_known_bug_ids(self, node, bugs):
        # Drop bugs whose comment text already mentions this changeset.
        for id in sorted(bugs.keys()):
            if self.get_bug_comments(id).find(short(node)) != -1:
                self.ui.status(_('bug %d already knows about changeset %s\n') %
                               (id, short(node)))
                del bugs[id]

    def updatebug(self, bugid, newstate, text, committer):
        # newstate may carry 'hours' (work time) and/or 'fix' (mark the
        # bug fixed); text is the comment body. committer is unused here:
        # XMLRPC comments are always attributed to the logged-in user.
        args = {}
        if 'hours' in newstate:
            args['work_time'] = newstate['hours']

        if self.bzvermajor >= 4:
            # Bugzilla 4.0+: Bug.update can set status/resolution and
            # add the comment in a single call.
            args['ids'] = [bugid]
            args['comment'] = {'body' : text}
            if 'fix' in newstate:
                args['status'] = self.fixstatus
                args['resolution'] = self.fixresolution
            args['token'] = self.bztoken
            self.bzproxy.Bug.update(args)
        else:
            # Pre-4.0 servers can only take a comment via XMLRPC.
            if 'fix' in newstate:
                self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
                               "to mark bugs fixed\n"))
            args['id'] = bugid
            args['comment'] = text
            self.bzproxy.Bug.add_comment(args)
772
772
class bzxmlrpcemail(bzxmlrpc):
    """Read data from Bugzilla via XMLRPC, send updates via email.

    Advantages of sending updates via email:
    1. Comments can be added as any user, not just logged in user.
    2. Bug statuses or other fields not accessible via XMLRPC can
       potentially be updated.

    There is no XMLRPC function to change bug status before Bugzilla
    4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
    But bugs can be marked fixed via email from 3.4 onwards.
    """

    # The email interface changes subtly between 3.4 and 3.6. In 3.4,
    # in-email fields are specified as '@<fieldname> = <value>'. In
    # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
    # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
    # compatibility, but rather than rely on this use the new format for
    # 4.0 onwards.

    def __init__(self, ui):
        bzxmlrpc.__init__(self, ui)

        self.bzemail = self.ui.config('bugzilla', 'bzemail')
        if not self.bzemail:
            raise error.Abort(_("configuration 'bzemail' missing"))
        # Fail early if the mail configuration is unusable.
        mail.validateconfig(self.ui)

    def makecommandline(self, fieldname, value):
        # Format one in-email field command for the server's Bugzilla
        # version (see the version-format comment above).
        if self.bzvermajor >= 4:
            return "@%s %s" % (fieldname, str(value))
        else:
            if fieldname == "id":
                fieldname = "bug_id"
            return "@%s = %s" % (fieldname, str(value))

    def send_bug_modify_email(self, bugid, commands, comment, committer):
        '''send modification message to Bugzilla bug via email.

        The message format is documented in the Bugzilla email_in.pl
        specification. commands is a list of command lines, comment is the
        comment text.

        To stop users from crafting commit comments with
        Bugzilla commands, specify the bug ID via the message body, rather
        than the subject line, and leave a blank line after it.
        '''
        user = self.map_committer(committer)
        matches = self.bzproxy.User.get({'match': [user],
                                         'token': self.bztoken})
        if not matches['users']:
            # Committer has no Bugzilla account; fall back to the
            # configured default bugzilla user.
            user = self.ui.config('bugzilla', 'user')
            matches = self.bzproxy.User.get({'match': [user],
                                             'token': self.bztoken})
            if not matches['users']:
                raise error.Abort(_("default bugzilla user %s email not found")
                                  % user)
        user = matches['users'][0]['email']
        commands.append(self.makecommandline("id", bugid))

        text = "\n".join(commands) + "\n\n" + comment

        _charsets = mail._charsets(self.ui)
        user = mail.addressencode(self.ui, user, _charsets)
        bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
        msg = mail.mimeencode(self.ui, text, _charsets)
        msg['From'] = user
        msg['To'] = bzemail
        msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
        sendmail = mail.connect(self.ui)
        sendmail(user, bzemail, msg.as_string())

    def updatebug(self, bugid, newstate, text, committer):
        # Translate the newstate dict into in-email command lines and
        # mail them together with the comment text.
        cmds = []
        if 'hours' in newstate:
            cmds.append(self.makecommandline("work_time", newstate['hours']))
        if 'fix' in newstate:
            cmds.append(self.makecommandline("bug_status", self.fixstatus))
            cmds.append(self.makecommandline("resolution", self.fixresolution))
        self.send_bug_modify_email(bugid, cmds, text, committer)
853
853
class NotFound(LookupError):
    """Raised when a Bugzilla REST request yields an HTTP 404 response."""
856
856
857 class bzrestapi(bzaccess):
857 class bzrestapi(bzaccess):
858 """Read and write bugzilla data using the REST API available since
858 """Read and write bugzilla data using the REST API available since
859 Bugzilla 5.0.
859 Bugzilla 5.0.
860 """
860 """
    def __init__(self, ui):
        bzaccess.__init__(self, ui)
        bz = self.ui.config('bugzilla', 'bzurl')
        # REST endpoints live under <bzurl>/rest.
        self.bzroot = '/'.join([bz, 'rest'])
        # Authentication: an API key takes precedence over user/password
        # (see apiurl()).
        self.apikey = self.ui.config('bugzilla', 'apikey')
        self.user = self.ui.config('bugzilla', 'user')
        self.passwd = self.ui.config('bugzilla', 'password')
        self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
        self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
870
870
871 def apiurl(self, targets, include_fields=None):
871 def apiurl(self, targets, include_fields=None):
872 url = '/'.join([self.bzroot] + [str(t) for t in targets])
872 url = '/'.join([self.bzroot] + [str(t) for t in targets])
873 qv = {}
873 qv = {}
874 if self.apikey:
874 if self.apikey:
875 qv['api_key'] = self.apikey
875 qv['api_key'] = self.apikey
876 elif self.user and self.passwd:
876 elif self.user and self.passwd:
877 qv['login'] = self.user
877 qv['login'] = self.user
878 qv['password'] = self.passwd
878 qv['password'] = self.passwd
879 if include_fields:
879 if include_fields:
880 qv['include_fields'] = include_fields
880 qv['include_fields'] = include_fields
881 if qv:
881 if qv:
882 url = '%s?%s' % (url, util.urlreq.urlencode(qv))
882 url = '%s?%s' % (url, util.urlreq.urlencode(qv))
883 return url
883 return url
884
884
    def _fetch(self, burl):
        '''GET burl and return the decoded JSON response body.

        Raises error.Abort on HTTP 401 and NotFound on HTTP 404; any
        other HTTP error is re-raised unchanged.
        '''
        try:
            resp = url.open(self.ui, burl)
            return json.loads(resp.read())
        except util.urlerr.httperror as inst:
            if inst.code == 401:
                raise error.Abort(_('authorization failed'))
            if inst.code == 404:
                raise NotFound()
            else:
                raise
896
896
    def _submit(self, burl, data, method='POST'):
        '''Send data as a JSON body to burl and return the decoded response.

        method is 'POST' by default, or 'PUT'. Raises error.Abort on
        HTTP 401 and NotFound on HTTP 404; other HTTP errors propagate.
        '''
        data = json.dumps(data)
        if method == 'PUT':
            # A request object with a body defaults to the POST verb;
            # subclass to force PUT for update-style calls.
            class putrequest(util.urlreq.request):
                def get_method(self):
                    return 'PUT'
            request_type = putrequest
        else:
            request_type = util.urlreq.request
        req = request_type(burl, data,
                           {'Content-Type': 'application/json'})
        try:
            resp = url.opener(self.ui).open(req)
            return json.loads(resp.read())
        except util.urlerr.httperror as inst:
            if inst.code == 401:
                raise error.Abort(_('authorization failed'))
            if inst.code == 404:
                raise NotFound()
            else:
                raise
918
918
919 def filter_real_bug_ids(self, bugs):
919 def filter_real_bug_ids(self, bugs):
920 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
920 '''remove bug IDs that do not exist in Bugzilla from bugs.'''
921 badbugs = set()
921 badbugs = set()
922 for bugid in bugs:
922 for bugid in bugs:
923 burl = self.apiurl(('bug', bugid), include_fields='status')
923 burl = self.apiurl(('bug', bugid), include_fields='status')
924 try:
924 try:
925 self._fetch(burl)
925 self._fetch(burl)
926 except NotFound:
926 except NotFound:
927 badbugs.add(bugid)
927 badbugs.add(bugid)
928 for bugid in badbugs:
928 for bugid in badbugs:
929 del bugs[bugid]
929 del bugs[bugid]
930
930
931 def filter_cset_known_bug_ids(self, node, bugs):
931 def filter_cset_known_bug_ids(self, node, bugs):
932 '''remove bug IDs where node occurs in comment text from bugs.'''
932 '''remove bug IDs where node occurs in comment text from bugs.'''
933 sn = short(node)
933 sn = short(node)
934 for bugid in bugs.keys():
934 for bugid in bugs.keys():
935 burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
935 burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
936 result = self._fetch(burl)
936 result = self._fetch(burl)
937 comments = result['bugs'][str(bugid)]['comments']
937 comments = result['bugs'][str(bugid)]['comments']
938 if any(sn in c['text'] for c in comments):
938 if any(sn in c['text'] for c in comments):
939 self.ui.status(_('bug %d already knows about changeset %s\n') %
939 self.ui.status(_('bug %d already knows about changeset %s\n') %
940 (bugid, sn))
940 (bugid, sn))
941 del bugs[bugid]
941 del bugs[bugid]
942
942
943 def updatebug(self, bugid, newstate, text, committer):
943 def updatebug(self, bugid, newstate, text, committer):
944 '''update the specified bug. Add comment text and set new states.
944 '''update the specified bug. Add comment text and set new states.
945
945
946 If possible add the comment as being from the committer of
946 If possible add the comment as being from the committer of
947 the changeset. Otherwise use the default Bugzilla user.
947 the changeset. Otherwise use the default Bugzilla user.
948 '''
948 '''
949 bugmod = {}
949 bugmod = {}
950 if 'hours' in newstate:
950 if 'hours' in newstate:
951 bugmod['work_time'] = newstate['hours']
951 bugmod['work_time'] = newstate['hours']
952 if 'fix' in newstate:
952 if 'fix' in newstate:
953 bugmod['status'] = self.fixstatus
953 bugmod['status'] = self.fixstatus
954 bugmod['resolution'] = self.fixresolution
954 bugmod['resolution'] = self.fixresolution
955 if bugmod:
955 if bugmod:
956 # if we have to change the bugs state do it here
956 # if we have to change the bugs state do it here
957 bugmod['comment'] = {
957 bugmod['comment'] = {
958 'comment': text,
958 'comment': text,
959 'is_private': False,
959 'is_private': False,
960 'is_markdown': False,
960 'is_markdown': False,
961 }
961 }
962 burl = self.apiurl(('bug', bugid))
962 burl = self.apiurl(('bug', bugid))
963 self._submit(burl, bugmod, method='PUT')
963 self._submit(burl, bugmod, method='PUT')
964 self.ui.debug('updated bug %s\n' % bugid)
964 self.ui.debug('updated bug %s\n' % bugid)
965 else:
965 else:
966 burl = self.apiurl(('bug', bugid, 'comment'))
966 burl = self.apiurl(('bug', bugid, 'comment'))
967 self._submit(burl, {
967 self._submit(burl, {
968 'comment': text,
968 'comment': text,
969 'is_private': False,
969 'is_private': False,
970 'is_markdown': False,
970 'is_markdown': False,
971 })
971 })
972 self.ui.debug('added comment to bug %s\n' % bugid)
972 self.ui.debug('added comment to bug %s\n' % bugid)
973
973
974 def notify(self, bugs, committer):
974 def notify(self, bugs, committer):
975 '''Force sending of Bugzilla notification emails.
975 '''Force sending of Bugzilla notification emails.
976
976
977 Only required if the access method does not trigger notification
977 Only required if the access method does not trigger notification
978 emails automatically.
978 emails automatically.
979 '''
979 '''
980 pass
980 pass
981
981
class bugzilla(object):
    # supported versions of bugzilla. different versions have
    # different schemas.
    _versions = {
        '2.16': bzmysql,
        '2.18': bzmysql_2_18,
        '3.0': bzmysql_3_0,
        'xmlrpc': bzxmlrpc,
        'xmlrpc+email': bzxmlrpcemail,
        'restapi': bzrestapi,
    }

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

        # pick the access driver for the configured bugzilla version
        bzversion = self.ui.config('bugzilla', 'version')
        try:
            bzclass = bugzilla._versions[bzversion]
        except KeyError:
            raise error.Abort(_('bugzilla version %s not supported') %
                              bzversion)
        self.bzdriver = bzclass(self.ui)

        # patterns that recognize plain bug references and "fixes bug"
        # style references in commit messages
        self.bug_re = re.compile(
            self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
        self.fix_re = re.compile(
            self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
        self.split_re = re.compile(r'\D+')

    def find_bugs(self, ctx):
        '''return bugs dictionary created from commit comment.

        Extract bug info from changeset comments. Filter out any that are
        not known to Bugzilla, and any that already have a reference to
        the given changeset in their comments.
        '''
        bugs = {}
        desc = ctx.description()
        pos = 0
        bugmatch = self.bug_re.search(desc, pos)
        fixmatch = self.fix_re.search(desc, pos)
        while bugmatch or fixmatch:
            bugattribs = {}
            # process whichever match comes first in the description
            if bugmatch and fixmatch:
                if bugmatch.start() < fixmatch.start():
                    m = bugmatch
                else:
                    m = fixmatch
            else:
                m = bugmatch or fixmatch
            pos = m.end()
            if m is bugmatch:
                bugmatch = self.bug_re.search(desc, pos)
                bugattribs.pop('fix', None)
            else:
                fixmatch = self.fix_re.search(desc, pos)
                bugattribs['fix'] = None

            try:
                ids = m.group('ids')
            except IndexError:
                # pattern has no named 'ids' group; fall back to group 1
                ids = m.group(1)
            try:
                bugattribs['hours'] = float(m.group('hours'))
            except IndexError:
                # pattern has no 'hours' group
                pass
            except TypeError:
                # 'hours' group exists but did not participate in the match
                pass
            except ValueError:
                self.ui.status(_("%s: invalid hours\n") % m.group('hours'))

            # every number in the matched span is a bug id
            for bugno in self.split_re.split(ids):
                if bugno:
                    bugs[int(bugno)] = bugattribs
        if bugs:
            self.bzdriver.filter_real_bug_ids(bugs)
        if bugs:
            self.bzdriver.filter_cset_known_bug_ids(ctx.node(), bugs)
        return bugs

    def update(self, bugid, newstate, ctx):
        '''update bugzilla bug with reference to changeset.'''

        def webroot(root):
            '''strip leading prefix of repo root and turn into
            url-safe path.'''
            strip = int(self.ui.config('bugzilla', 'strip'))
            root = util.pconvert(root)
            while strip > 0:
                slash = root.find('/')
                if slash == -1:
                    break
                root = root[slash + 1:]
                strip -= 1
            return root

        # an explicit template wins over a style file; fall back to a
        # builtin template when neither is configured
        mapfile = None
        tmpl = self.ui.config('bugzilla', 'template')
        if not tmpl:
            mapfile = self.ui.config('bugzilla', 'style')
        if not mapfile and not tmpl:
            tmpl = _('changeset {node|short} in repo {root} refers '
                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
        spec = logcmdutil.templatespec(tmpl, mapfile)
        t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
        self.ui.pushbuffer()
        t.show(ctx, changes=ctx.changeset(),
               bug=str(bugid),
               hgweb=self.ui.config('web', 'baseurl'),
               root=self.repo.root,
               webroot=webroot(self.repo.root))
        data = self.ui.popbuffer()
        self.bzdriver.updatebug(bugid, newstate, data, util.email(ctx.user()))

    def notify(self, bugs, committer):
        '''ensure Bugzilla users are notified of bug change.'''
        self.bzdriver.notify(bugs, committer)
1108
1107
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data.'''
    if node is None:
        raise error.Abort(_('hook type %s does not pass a changeset id') %
                          hooktype)
    try:
        bz = bugzilla(ui, repo)
        ctx = repo[node]
        bugs = bz.find_bugs(ctx)
        if bugs:
            for bugid in bugs:
                bz.update(bugid, bugs[bugid], ctx)
            bz.notify(bugs, util.email(ctx.user()))
    except Exception as e:
        # surface any driver/network failure as a user-visible abort
        raise error.Abort(_('Bugzilla error: %s') % e)
@@ -1,519 +1,518 b''
1 # journal.py
1 # journal.py
2 #
2 #
3 # Copyright 2014-2016 Facebook, Inc.
3 # Copyright 2014-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """track previous positions of bookmarks (EXPERIMENTAL)
7 """track previous positions of bookmarks (EXPERIMENTAL)
8
8
9 This extension adds a new command: `hg journal`, which shows you where
9 This extension adds a new command: `hg journal`, which shows you where
10 bookmarks were previously located.
10 bookmarks were previously located.
11
11
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import collections
16 import collections
17 import errno
17 import errno
18 import os
18 import os
19 import weakref
19 import weakref
20
20
21 from mercurial.i18n import _
21 from mercurial.i18n import _
22
22
23 from mercurial import (
23 from mercurial import (
24 bookmarks,
24 bookmarks,
25 cmdutil,
25 cmdutil,
26 dispatch,
26 dispatch,
27 error,
27 error,
28 extensions,
28 extensions,
29 hg,
29 hg,
30 localrepo,
30 localrepo,
31 lock,
31 lock,
32 logcmdutil,
32 logcmdutil,
33 node,
33 node,
34 pycompat,
34 pycompat,
35 registrar,
35 registrar,
36 util,
36 util,
37 )
37 )
38
38
39 from . import share
39 from . import share
40
40
41 cmdtable = {}
41 cmdtable = {}
42 command = registrar.command(cmdtable)
42 command = registrar.command(cmdtable)
43
43
44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # be specifying the version(s) of Mercurial they are tested with, or
46 # be specifying the version(s) of Mercurial they are tested with, or
47 # leave the attribute unspecified.
47 # leave the attribute unspecified.
48 testedwith = 'ships-with-hg-core'
48 testedwith = 'ships-with-hg-core'
49
49
50 # storage format version; increment when the format changes
50 # storage format version; increment when the format changes
51 storageversion = 0
51 storageversion = 0
52
52
53 # namespaces
53 # namespaces
54 bookmarktype = 'bookmark'
54 bookmarktype = 'bookmark'
55 wdirparenttype = 'wdirparent'
55 wdirparenttype = 'wdirparent'
56 # In a shared repository, what shared feature name is used
56 # In a shared repository, what shared feature name is used
57 # to indicate this namespace is shared with the source?
57 # to indicate this namespace is shared with the source?
58 sharednamespaces = {
58 sharednamespaces = {
59 bookmarktype: hg.sharedbookmarks,
59 bookmarktype: hg.sharedbookmarks,
60 }
60 }
61
61
62 # Journal recording, register hooks and storage object
62 # Journal recording, register hooks and storage object
def extsetup(ui):
    """Install the wrappers that feed journal recording."""
    # record bookmark moves and dirstate parent changes...
    extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
    extensions.wrapfilecache(
        localrepo.localrepository, 'dirstate', wrapdirstate)
    # ...remember the command line that caused them...
    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
    # ...and keep the journal consistent across share/unshare
    extensions.wrapfunction(hg, 'postshare', wrappostshare)
    extensions.wrapfunction(hg, 'copystore', unsharejournal)
70
70
def reposetup(ui, repo):
    """Attach journal storage to every local repository."""
    if not repo.local():
        return
    repo.journal = journalstorage(repo)
    repo._wlockfreeprefix.add('namejournal')

    dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
    if cached:
        # already instantiated dirstate isn't yet marked as
        # "journal"-ing, even though repo.dirstate() was already
        # wrapped by own wrapdirstate()
        _setupdirstate(repo, dirstate)
82
82
def runcommand(orig, lui, repo, cmd, fullargs, *args):
    """Track the command line options for recording in the journal"""
    # stash the full argument vector before dispatch so every journal
    # entry written during this command can reference it
    journalstorage.recordcommand(*fullargs)
    return orig(lui, repo, cmd, fullargs, *args)
87
87
def _setupdirstate(repo, dirstate):
    """Expose the repo's journal to a dirstate and hook parent changes."""
    dirstate.journalstorage = repo.journal
    dirstate.addparentchangecallback('journal', recorddirstateparents)
91
91
92 # hooks to record dirstate changes
92 # hooks to record dirstate changes
def wrapdirstate(orig, repo):
    """Make journal storage available to the dirstate object"""
    dirstate = orig(repo)
    # repos without a journal (reposetup not run, non-local) stay untouched
    if util.safehasattr(repo, 'journal'):
        _setupdirstate(repo, dirstate)
    return dirstate
99
99
def recorddirstateparents(dirstate, old, new):
    """Records all dirstate parent changes in the journal."""
    old = list(old)
    new = list(new)
    if not util.safehasattr(dirstate, 'journalstorage'):
        return
    # only record two hashes if there was a merge
    oldhashes = old if old[1] != node.nullid else old[:1]
    newhashes = new if new[1] != node.nullid else new[:1]
    dirstate.journalstorage.record(
        wdirparenttype, '.', oldhashes, newhashes)
110
110
111 # hooks to record bookmark changes (both local and remote)
111 # hooks to record bookmark changes (both local and remote)
def recordbookmarks(orig, store, fp):
    """Records all bookmark changes in the journal."""
    repo = store._repo
    if util.safehasattr(repo, 'journal'):
        # compare the store being written against what is on disk and
        # journal every mark whose target changed
        previous = bookmarks.bmstore(repo)
        for mark, value in store.iteritems():
            oldvalue = previous.get(mark, node.nullid)
            if value != oldvalue:
                repo.journal.record(bookmarktype, mark, oldvalue, value)
    return orig(store, fp)
122
122
123 # shared repository support
123 # shared repository support
def _readsharedfeatures(repo):
    """A set of shared features for this repository"""
    try:
        content = repo.vfs.read('shared')
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise
        # no 'shared' file at all: nothing is shared
        return set()
    return set(content.splitlines())
132
132
133 def _mergeentriesiter(*iterables, **kwargs):
133 def _mergeentriesiter(*iterables, **kwargs):
134 """Given a set of sorted iterables, yield the next entry in merged order
134 """Given a set of sorted iterables, yield the next entry in merged order
135
135
136 Note that by default entries go from most recent to oldest.
136 Note that by default entries go from most recent to oldest.
137 """
137 """
138 order = kwargs.pop(r'order', max)
138 order = kwargs.pop(r'order', max)
139 iterables = [iter(it) for it in iterables]
139 iterables = [iter(it) for it in iterables]
140 # this tracks still active iterables; iterables are deleted as they are
140 # this tracks still active iterables; iterables are deleted as they are
141 # exhausted, which is why this is a dictionary and why each entry also
141 # exhausted, which is why this is a dictionary and why each entry also
142 # stores the key. Entries are mutable so we can store the next value each
142 # stores the key. Entries are mutable so we can store the next value each
143 # time.
143 # time.
144 iterable_map = {}
144 iterable_map = {}
145 for key, it in enumerate(iterables):
145 for key, it in enumerate(iterables):
146 try:
146 try:
147 iterable_map[key] = [next(it), key, it]
147 iterable_map[key] = [next(it), key, it]
148 except StopIteration:
148 except StopIteration:
149 # empty entry, can be ignored
149 # empty entry, can be ignored
150 pass
150 pass
151
151
152 while iterable_map:
152 while iterable_map:
153 value, key, it = order(iterable_map.itervalues())
153 value, key, it = order(iterable_map.itervalues())
154 yield value
154 yield value
155 try:
155 try:
156 iterable_map[key][0] = next(it)
156 iterable_map[key][0] = next(it)
157 except StopIteration:
157 except StopIteration:
158 # this iterable is empty, remove it from consideration
158 # this iterable is empty, remove it from consideration
159 del iterable_map[key]
159 del iterable_map[key]
160
160
def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
    """Mark this shared working copy as sharing journal information"""
    with destrepo.wlock():
        orig(sourcerepo, destrepo, **kwargs)
        # append to the shared-features file so _readsharedfeatures()
        # on the destination reports 'journal'
        with destrepo.vfs('shared', 'a') as fp:
            fp.write('journal\n')
167
167
def unsharejournal(orig, ui, repo, repopath):
    """Copy shared journal entries into this repo when unsharing"""
    unsharing = (repo.path == repopath and repo.shared()
                 and util.safehasattr(repo, 'journal'))
    if unsharing:
        sharedrepo = share._getsrcrepo(repo)
        sharedfeatures = _readsharedfeatures(repo)
        # strict superset: there is a shared repo AND journal entries
        # beyond the bare 'journal' marker are shared with it
        if sharedrepo and sharedfeatures > {'journal'}:
            # set the local file aside first, then merge it back with the
            # shared entries into a fresh local journal
            if repo.vfs.exists('namejournal'):
                journalpath = repo.vfs.join('namejournal')
                util.rename(journalpath, journalpath + '.bak')
            storage = repo.journal
            local = storage._open(
                repo.vfs, filename='namejournal.bak', _newestfirst=False)
            shared = (
                e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
                if sharednamespaces.get(e.namespace) in sharedfeatures)
            for entry in _mergeentriesiter(local, shared, order=min):
                storage._write(repo.vfs, entry)

    return orig(ui, repo, repopath)
191
191
class journalentry(collections.namedtuple(
    u'journalentry',
    u'timestamp user command namespace name oldhashes newhashes')):
    """Individual journal entry

    * timestamp: a mercurial (time, timezone) tuple
    * user: the username that ran the command
    * namespace: the entry namespace, an opaque string
    * name: the name of the changed item, opaque string with meaning in the
      namespace
    * command: the hg command that triggered this record
    * oldhashes: a tuple of one or more binary hashes for the old location
    * newhashes: a tuple of one or more binary hashes for the new location

    Handles serialisation from and to the storage format. Fields are
    separated by newlines, hashes are written out in hex separated by commas,
    timestamp and timezone are separated by a space.

    """
    @classmethod
    def fromstorage(cls, line):
        """Parse one storage-format record into a journalentry."""
        (time, user, command, namespace, name,
         oldhashes, newhashes) = line.split('\n')
        timestamp, tz = time.split()
        stamp = (float(timestamp), int(tz))
        old = tuple(node.bin(h) for h in oldhashes.split(','))
        new = tuple(node.bin(h) for h in newhashes.split(','))
        return cls(stamp, user, command, namespace, name, old, new)

    def __str__(self):
        """String representation for storage"""
        time = ' '.join(map(str, self.timestamp))
        oldhashes = ','.join(node.hex(h) for h in self.oldhashes)
        newhashes = ','.join(node.hex(h) for h in self.newhashes)
        return '\n'.join((
            time, self.user, self.command, self.namespace, self.name,
            oldhashes, newhashes))
231
231
232 class journalstorage(object):
232 class journalstorage(object):
233 """Storage for journal entries
233 """Storage for journal entries
234
234
235 Entries are divided over two files; one with entries that pertain to the
235 Entries are divided over two files; one with entries that pertain to the
236 local working copy *only*, and one with entries that are shared across
236 local working copy *only*, and one with entries that are shared across
237 multiple working copies when shared using the share extension.
237 multiple working copies when shared using the share extension.
238
238
239 Entries are stored with NUL bytes as separators. See the journalentry
239 Entries are stored with NUL bytes as separators. See the journalentry
240 class for the per-entry structure.
240 class for the per-entry structure.
241
241
242 The file format starts with an integer version, delimited by a NUL.
242 The file format starts with an integer version, delimited by a NUL.
243
243
244 This storage uses a dedicated lock; this makes it easier to avoid issues
244 This storage uses a dedicated lock; this makes it easier to avoid issues
245 with adding entries that added when the regular wlock is unlocked (e.g.
245 with adding entries that added when the regular wlock is unlocked (e.g.
246 the dirstate).
246 the dirstate).
247
247
248 """
248 """
249 _currentcommand = ()
249 _currentcommand = ()
250 _lockref = None
250 _lockref = None
251
251
252 def __init__(self, repo):
252 def __init__(self, repo):
253 self.user = util.getuser()
253 self.user = util.getuser()
254 self.ui = repo.ui
254 self.ui = repo.ui
255 self.vfs = repo.vfs
255 self.vfs = repo.vfs
256
256
257 # is this working copy using a shared storage?
257 # is this working copy using a shared storage?
258 self.sharedfeatures = self.sharedvfs = None
258 self.sharedfeatures = self.sharedvfs = None
259 if repo.shared():
259 if repo.shared():
260 features = _readsharedfeatures(repo)
260 features = _readsharedfeatures(repo)
261 sharedrepo = share._getsrcrepo(repo)
261 sharedrepo = share._getsrcrepo(repo)
262 if sharedrepo is not None and 'journal' in features:
262 if sharedrepo is not None and 'journal' in features:
263 self.sharedvfs = sharedrepo.vfs
263 self.sharedvfs = sharedrepo.vfs
264 self.sharedfeatures = features
264 self.sharedfeatures = features
265
265
266 # track the current command for recording in journal entries
266 # track the current command for recording in journal entries
267 @property
267 @property
268 def command(self):
268 def command(self):
269 commandstr = ' '.join(
269 commandstr = ' '.join(
270 map(util.shellquote, journalstorage._currentcommand))
270 map(util.shellquote, journalstorage._currentcommand))
271 if '\n' in commandstr:
271 if '\n' in commandstr:
272 # truncate multi-line commands
272 # truncate multi-line commands
273 commandstr = commandstr.partition('\n')[0] + ' ...'
273 commandstr = commandstr.partition('\n')[0] + ' ...'
274 return commandstr
274 return commandstr
275
275
276 @classmethod
276 @classmethod
277 def recordcommand(cls, *fullargs):
277 def recordcommand(cls, *fullargs):
278 """Set the current hg arguments, stored with recorded entries"""
278 """Set the current hg arguments, stored with recorded entries"""
279 # Set the current command on the class because we may have started
279 # Set the current command on the class because we may have started
280 # with a non-local repo (cloning for example).
280 # with a non-local repo (cloning for example).
281 cls._currentcommand = fullargs
281 cls._currentcommand = fullargs
282
282
283 def _currentlock(self, lockref):
283 def _currentlock(self, lockref):
284 """Returns the lock if it's held, or None if it's not.
284 """Returns the lock if it's held, or None if it's not.
285
285
286 (This is copied from the localrepo class)
286 (This is copied from the localrepo class)
287 """
287 """
288 if lockref is None:
288 if lockref is None:
289 return None
289 return None
290 l = lockref()
290 l = lockref()
291 if l is None or not l.held:
291 if l is None or not l.held:
292 return None
292 return None
293 return l
293 return l
294
294
295 def jlock(self, vfs):
295 def jlock(self, vfs):
296 """Create a lock for the journal file"""
296 """Create a lock for the journal file"""
297 if self._currentlock(self._lockref) is not None:
297 if self._currentlock(self._lockref) is not None:
298 raise error.Abort(_('journal lock does not support nesting'))
298 raise error.Abort(_('journal lock does not support nesting'))
299 desc = _('journal of %s') % vfs.base
299 desc = _('journal of %s') % vfs.base
300 try:
300 try:
301 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
301 l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
302 except error.LockHeld as inst:
302 except error.LockHeld as inst:
303 self.ui.warn(
303 self.ui.warn(
304 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
304 _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
305 # default to 600 seconds timeout
305 # default to 600 seconds timeout
306 l = lock.lock(
306 l = lock.lock(
307 vfs, 'namejournal.lock',
307 vfs, 'namejournal.lock',
308 self.ui.configint("ui", "timeout"), desc=desc)
308 self.ui.configint("ui", "timeout"), desc=desc)
309 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
309 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
310 self._lockref = weakref.ref(l)
310 self._lockref = weakref.ref(l)
311 return l
311 return l
312
312
313 def record(self, namespace, name, oldhashes, newhashes):
313 def record(self, namespace, name, oldhashes, newhashes):
314 """Record a new journal entry
314 """Record a new journal entry
315
315
316 * namespace: an opaque string; this can be used to filter on the type
316 * namespace: an opaque string; this can be used to filter on the type
317 of recorded entries.
317 of recorded entries.
318 * name: the name defining this entry; for bookmarks, this is the
318 * name: the name defining this entry; for bookmarks, this is the
319 bookmark name. Can be filtered on when retrieving entries.
319 bookmark name. Can be filtered on when retrieving entries.
320 * oldhashes and newhashes: each a single binary hash, or a list of
320 * oldhashes and newhashes: each a single binary hash, or a list of
321 binary hashes. These represent the old and new position of the named
321 binary hashes. These represent the old and new position of the named
322 item.
322 item.
323
323
324 """
324 """
325 if not isinstance(oldhashes, list):
325 if not isinstance(oldhashes, list):
326 oldhashes = [oldhashes]
326 oldhashes = [oldhashes]
327 if not isinstance(newhashes, list):
327 if not isinstance(newhashes, list):
328 newhashes = [newhashes]
328 newhashes = [newhashes]
329
329
330 entry = journalentry(
330 entry = journalentry(
331 util.makedate(), self.user, self.command, namespace, name,
331 util.makedate(), self.user, self.command, namespace, name,
332 oldhashes, newhashes)
332 oldhashes, newhashes)
333
333
334 vfs = self.vfs
334 vfs = self.vfs
335 if self.sharedvfs is not None:
335 if self.sharedvfs is not None:
336 # write to the shared repository if this feature is being
336 # write to the shared repository if this feature is being
337 # shared between working copies.
337 # shared between working copies.
338 if sharednamespaces.get(namespace) in self.sharedfeatures:
338 if sharednamespaces.get(namespace) in self.sharedfeatures:
339 vfs = self.sharedvfs
339 vfs = self.sharedvfs
340
340
341 self._write(vfs, entry)
341 self._write(vfs, entry)
342
342
343 def _write(self, vfs, entry):
343 def _write(self, vfs, entry):
344 with self.jlock(vfs):
344 with self.jlock(vfs):
345 version = None
345 version = None
346 # open file in amend mode to ensure it is created if missing
346 # open file in amend mode to ensure it is created if missing
347 with vfs('namejournal', mode='a+b') as f:
347 with vfs('namejournal', mode='a+b') as f:
348 f.seek(0, os.SEEK_SET)
348 f.seek(0, os.SEEK_SET)
349 # Read just enough bytes to get a version number (up to 2
349 # Read just enough bytes to get a version number (up to 2
350 # digits plus separator)
350 # digits plus separator)
351 version = f.read(3).partition('\0')[0]
351 version = f.read(3).partition('\0')[0]
352 if version and version != str(storageversion):
352 if version and version != str(storageversion):
353 # different version of the storage. Exit early (and not
353 # different version of the storage. Exit early (and not
354 # write anything) if this is not a version we can handle or
354 # write anything) if this is not a version we can handle or
355 # the file is corrupt. In future, perhaps rotate the file
355 # the file is corrupt. In future, perhaps rotate the file
356 # instead?
356 # instead?
357 self.ui.warn(
357 self.ui.warn(
358 _("unsupported journal file version '%s'\n") % version)
358 _("unsupported journal file version '%s'\n") % version)
359 return
359 return
360 if not version:
360 if not version:
361 # empty file, write version first
361 # empty file, write version first
362 f.write(str(storageversion) + '\0')
362 f.write(str(storageversion) + '\0')
363 f.seek(0, os.SEEK_END)
363 f.seek(0, os.SEEK_END)
364 f.write(str(entry) + '\0')
364 f.write(str(entry) + '\0')
365
365
366 def filtered(self, namespace=None, name=None):
366 def filtered(self, namespace=None, name=None):
367 """Yield all journal entries with the given namespace or name
367 """Yield all journal entries with the given namespace or name
368
368
369 Both the namespace and the name are optional; if neither is given all
369 Both the namespace and the name are optional; if neither is given all
370 entries in the journal are produced.
370 entries in the journal are produced.
371
371
372 Matching supports regular expressions by using the `re:` prefix
372 Matching supports regular expressions by using the `re:` prefix
373 (use `literal:` to match names or namespaces that start with `re:`)
373 (use `literal:` to match names or namespaces that start with `re:`)
374
374
375 """
375 """
376 if namespace is not None:
376 if namespace is not None:
377 namespace = util.stringmatcher(namespace)[-1]
377 namespace = util.stringmatcher(namespace)[-1]
378 if name is not None:
378 if name is not None:
379 name = util.stringmatcher(name)[-1]
379 name = util.stringmatcher(name)[-1]
380 for entry in self:
380 for entry in self:
381 if namespace is not None and not namespace(entry.namespace):
381 if namespace is not None and not namespace(entry.namespace):
382 continue
382 continue
383 if name is not None and not name(entry.name):
383 if name is not None and not name(entry.name):
384 continue
384 continue
385 yield entry
385 yield entry
386
386
387 def __iter__(self):
387 def __iter__(self):
388 """Iterate over the storage
388 """Iterate over the storage
389
389
390 Yields journalentry instances for each contained journal record.
390 Yields journalentry instances for each contained journal record.
391
391
392 """
392 """
393 local = self._open(self.vfs)
393 local = self._open(self.vfs)
394
394
395 if self.sharedvfs is None:
395 if self.sharedvfs is None:
396 return local
396 return local
397
397
398 # iterate over both local and shared entries, but only those
398 # iterate over both local and shared entries, but only those
399 # shared entries that are among the currently shared features
399 # shared entries that are among the currently shared features
400 shared = (
400 shared = (
401 e for e in self._open(self.sharedvfs)
401 e for e in self._open(self.sharedvfs)
402 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
402 if sharednamespaces.get(e.namespace) in self.sharedfeatures)
403 return _mergeentriesiter(local, shared)
403 return _mergeentriesiter(local, shared)
404
404
405 def _open(self, vfs, filename='namejournal', _newestfirst=True):
405 def _open(self, vfs, filename='namejournal', _newestfirst=True):
406 if not vfs.exists(filename):
406 if not vfs.exists(filename):
407 return
407 return
408
408
409 with vfs(filename) as f:
409 with vfs(filename) as f:
410 raw = f.read()
410 raw = f.read()
411
411
412 lines = raw.split('\0')
412 lines = raw.split('\0')
413 version = lines and lines[0]
413 version = lines and lines[0]
414 if version != str(storageversion):
414 if version != str(storageversion):
415 version = version or _('not available')
415 version = version or _('not available')
416 raise error.Abort(_("unknown journal file version '%s'") % version)
416 raise error.Abort(_("unknown journal file version '%s'") % version)
417
417
418 # Skip the first line, it's a version number. Normally we iterate over
418 # Skip the first line, it's a version number. Normally we iterate over
419 # these in reverse order to list newest first; only when copying across
419 # these in reverse order to list newest first; only when copying across
420 # a shared storage do we forgo reversing.
420 # a shared storage do we forgo reversing.
421 lines = lines[1:]
421 lines = lines[1:]
422 if _newestfirst:
422 if _newestfirst:
423 lines = reversed(lines)
423 lines = reversed(lines)
424 for line in lines:
424 for line in lines:
425 if not line:
425 if not line:
426 continue
426 continue
427 yield journalentry.fromstorage(line)
427 yield journalentry.fromstorage(line)
428
428
# journal reading
# log options that don't make sense for journal
_ignoreopts = ('no-merges', 'graph')
@command(
    'journal', [
        ('', 'all', None, 'show history for all names'),
        ('c', 'commits', None, 'show commit metadata'),
    ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
    '[OPTION]... [BOOKMARKNAME]')
def journal(ui, repo, *args, **opts):
    """show the previous position of bookmarks and the working copy

    The journal is used to see the previous commits that bookmarks and the
    working copy pointed to. By default the previous locations for the working
    copy.  Passing a bookmark name will show all the previous positions of
    that bookmark. Use the --all switch to show previous locations for all
    bookmarks and the working copy; each line will then include the bookmark
    name, or '.' for the working copy, as well.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a name that actually starts with `re:`,
    use the prefix `literal:`.

    By default hg journal only shows the commit hash and the command that was
    running at that time. -v/--verbose will show the prior hash, the user, and
    the time at which it happened.

    Use -c/--commits to output log information on each commit hash; at this
    point you can use the usual `--patch`, `--git`, `--stat` and `--template`
    switches to alter the log output for these.

    `hg journal -T json` can be used to produce machine readable output.

    """
    opts = pycompat.byteskwargs(opts)
    # figure out which name(s) to report on: '.' (working copy) by
    # default, all names with --all, or the single positional argument
    name = '.'
    if opts.get('all'):
        if args:
            raise error.Abort(
                _("You can't combine --all and filtering on a name"))
        name = None
    if args:
        name = args[0]

    fm = ui.formatter('journal', opts)

    # the header line is human-oriented; suppress it for JSON output
    if opts.get("template") != "json":
        if name is None:
            displayname = _('the working copy and bookmarks')
        else:
            displayname = "'%s'" % name
        ui.status(_("previous locations of %s:\n") % displayname)

    limit = logcmdutil.getlimit(opts)
    # entry stays None when the journal yields nothing; checked below
    entry = None
    ui.pager('journal')
    for count, entry in enumerate(repo.journal.filtered(name=name)):
        if count == limit:
            break
        newhashesstr = fm.formatlist(map(fm.hexfunc, entry.newhashes),
                                     name='node', sep=',')
        oldhashesstr = fm.formatlist(map(fm.hexfunc, entry.oldhashes),
                                     name='node', sep=',')

        fm.startitem()
        fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
        fm.write('newhashes', '%s', newhashesstr)
        fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
        # the name column is only meaningful when several names may appear
        fm.condwrite(
            opts.get('all') or name.startswith('re:'),
            'name', '  %-8s', entry.name)

        timestring = fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
        fm.condwrite(ui.verbose, 'date', ' %s', timestring)
        fm.write('command', '  %s\n', entry.command)

        if opts.get("commits"):
            # changesetdisplayer() no longer takes a 'buffered' argument
            # here; rely on its defaults (see Mercurial r35972)
            displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
            for hash in entry.newhashes:
                try:
                    ctx = repo[hash]
                    displayer.show(ctx)
                except error.RepoLookupError as e:
                    fm.write('repolookuperror', "%s\n\n", str(e))
            displayer.close()

    fm.end()

    if entry is None:
        ui.status(_("no recorded locations\n"))
@@ -1,485 +1,484 b''
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''hooks for sending email push notifications
8 '''hooks for sending email push notifications
9
9
10 This extension implements hooks to send email notifications when
10 This extension implements hooks to send email notifications when
11 changesets are sent from or received by the local repository.
11 changesets are sent from or received by the local repository.
12
12
13 First, enable the extension as explained in :hg:`help extensions`, and
13 First, enable the extension as explained in :hg:`help extensions`, and
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 are run when changesets are received, while ``outgoing`` hooks are for
15 are run when changesets are received, while ``outgoing`` hooks are for
16 changesets sent to another repository::
16 changesets sent to another repository::
17
17
18 [hooks]
18 [hooks]
19 # one email for each incoming changeset
19 # one email for each incoming changeset
20 incoming.notify = python:hgext.notify.hook
20 incoming.notify = python:hgext.notify.hook
21 # one email for all incoming changesets
21 # one email for all incoming changesets
22 changegroup.notify = python:hgext.notify.hook
22 changegroup.notify = python:hgext.notify.hook
23
23
24 # one email for all outgoing changesets
24 # one email for all outgoing changesets
25 outgoing.notify = python:hgext.notify.hook
25 outgoing.notify = python:hgext.notify.hook
26
26
27 This registers the hooks. To enable notification, subscribers must
27 This registers the hooks. To enable notification, subscribers must
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
28 be assigned to repositories. The ``[usersubs]`` section maps multiple
29 repositories to a given recipient. The ``[reposubs]`` section maps
29 repositories to a given recipient. The ``[reposubs]`` section maps
30 multiple recipients to a single repository::
30 multiple recipients to a single repository::
31
31
32 [usersubs]
32 [usersubs]
33 # key is subscriber email, value is a comma-separated list of repo patterns
33 # key is subscriber email, value is a comma-separated list of repo patterns
34 user@host = pattern
34 user@host = pattern
35
35
36 [reposubs]
36 [reposubs]
37 # key is repo pattern, value is a comma-separated list of subscriber emails
37 # key is repo pattern, value is a comma-separated list of subscriber emails
38 pattern = user@host
38 pattern = user@host
39
39
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
40 A ``pattern`` is a ``glob`` matching the absolute path to a repository,
41 optionally combined with a revset expression. A revset expression, if
41 optionally combined with a revset expression. A revset expression, if
42 present, is separated from the glob by a hash. Example::
42 present, is separated from the glob by a hash. Example::
43
43
44 [reposubs]
44 [reposubs]
45 */widgets#branch(release) = qa-team@example.com
45 */widgets#branch(release) = qa-team@example.com
46
46
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
47 This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
48 branch triggers a notification in any repository ending in ``widgets``.
48 branch triggers a notification in any repository ending in ``widgets``.
49
49
50 In order to place them under direct user management, ``[usersubs]`` and
50 In order to place them under direct user management, ``[usersubs]`` and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
51 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
52 incorporated by reference::
52 incorporated by reference::
53
53
54 [notify]
54 [notify]
55 config = /path/to/subscriptionsfile
55 config = /path/to/subscriptionsfile
56
56
57 Notifications will not be sent until the ``notify.test`` value is set
57 Notifications will not be sent until the ``notify.test`` value is set
58 to ``False``; see below.
58 to ``False``; see below.
59
59
60 Notifications content can be tweaked with the following configuration entries:
60 Notifications content can be tweaked with the following configuration entries:
61
61
62 notify.test
62 notify.test
63 If ``True``, print messages to stdout instead of sending them. Default: True.
63 If ``True``, print messages to stdout instead of sending them. Default: True.
64
64
65 notify.sources
65 notify.sources
66 Space-separated list of change sources. Notifications are activated only
66 Space-separated list of change sources. Notifications are activated only
67 when a changeset's source is in this list. Sources may be:
67 when a changeset's source is in this list. Sources may be:
68
68
69 :``serve``: changesets received via http or ssh
69 :``serve``: changesets received via http or ssh
70 :``pull``: changesets received via ``hg pull``
70 :``pull``: changesets received via ``hg pull``
71 :``unbundle``: changesets received via ``hg unbundle``
71 :``unbundle``: changesets received via ``hg unbundle``
72 :``push``: changesets sent or received via ``hg push``
72 :``push``: changesets sent or received via ``hg push``
73 :``bundle``: changesets sent via ``hg unbundle``
73 :``bundle``: changesets sent via ``hg unbundle``
74
74
75 Default: serve.
75 Default: serve.
76
76
77 notify.strip
77 notify.strip
78 Number of leading slashes to strip from url paths. By default, notifications
78 Number of leading slashes to strip from url paths. By default, notifications
79 reference repositories with their absolute path. ``notify.strip`` lets you
79 reference repositories with their absolute path. ``notify.strip`` lets you
80 turn them into relative paths. For example, ``notify.strip=3`` will change
80 turn them into relative paths. For example, ``notify.strip=3`` will change
81 ``/long/path/repository`` into ``repository``. Default: 0.
81 ``/long/path/repository`` into ``repository``. Default: 0.
82
82
83 notify.domain
83 notify.domain
84 Default email domain for sender or recipients with no explicit domain.
84 Default email domain for sender or recipients with no explicit domain.
85
85
86 notify.style
86 notify.style
87 Style file to use when formatting emails.
87 Style file to use when formatting emails.
88
88
89 notify.template
89 notify.template
90 Template to use when formatting emails.
90 Template to use when formatting emails.
91
91
92 notify.incoming
92 notify.incoming
93 Template to use when run as an incoming hook, overriding ``notify.template``.
93 Template to use when run as an incoming hook, overriding ``notify.template``.
94
94
95 notify.outgoing
95 notify.outgoing
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
96 Template to use when run as an outgoing hook, overriding ``notify.template``.
97
97
98 notify.changegroup
98 notify.changegroup
99 Template to use when running as a changegroup hook, overriding
99 Template to use when running as a changegroup hook, overriding
100 ``notify.template``.
100 ``notify.template``.
101
101
102 notify.maxdiff
102 notify.maxdiff
103 Maximum number of diff lines to include in notification email. Set to 0
103 Maximum number of diff lines to include in notification email. Set to 0
104 to disable the diff, or -1 to include all of it. Default: 300.
104 to disable the diff, or -1 to include all of it. Default: 300.
105
105
106 notify.maxsubject
106 notify.maxsubject
107 Maximum number of characters in email's subject line. Default: 67.
107 Maximum number of characters in email's subject line. Default: 67.
108
108
109 notify.diffstat
109 notify.diffstat
110 Set to True to include a diffstat before diff content. Default: True.
110 Set to True to include a diffstat before diff content. Default: True.
111
111
112 notify.merge
112 notify.merge
113 If True, send notifications for merge changesets. Default: True.
113 If True, send notifications for merge changesets. Default: True.
114
114
115 notify.mbox
115 notify.mbox
116 If set, append mails to this mbox file instead of sending. Default: None.
116 If set, append mails to this mbox file instead of sending. Default: None.
117
117
118 notify.fromauthor
118 notify.fromauthor
119 If set, use the committer of the first changeset in a changegroup for
119 If set, use the committer of the first changeset in a changegroup for
120 the "From" field of the notification mail. If not set, take the user
120 the "From" field of the notification mail. If not set, take the user
121 from the pushing repo. Default: False.
121 from the pushing repo. Default: False.
122
122
123 If set, the following entries will also be used to customize the
123 If set, the following entries will also be used to customize the
124 notifications:
124 notifications:
125
125
126 email.from
126 email.from
127 Email ``From`` address to use if none can be found in the generated
127 Email ``From`` address to use if none can be found in the generated
128 email content.
128 email content.
129
129
130 web.baseurl
130 web.baseurl
131 Root repository URL to combine with repository paths when making
131 Root repository URL to combine with repository paths when making
132 references. See also ``notify.strip``.
132 references. See also ``notify.strip``.
133
133
134 '''
134 '''
135 from __future__ import absolute_import
135 from __future__ import absolute_import
136
136
137 import email
137 import email
138 import email.parser as emailparser
138 import email.parser as emailparser
139 import fnmatch
139 import fnmatch
140 import socket
140 import socket
141 import time
141 import time
142
142
143 from mercurial.i18n import _
143 from mercurial.i18n import _
144 from mercurial import (
144 from mercurial import (
145 error,
145 error,
146 logcmdutil,
146 logcmdutil,
147 mail,
147 mail,
148 patch,
148 patch,
149 registrar,
149 registrar,
150 util,
150 util,
151 )
151 )
152
152
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

configtable = {}
configitem = registrar.configitem(configtable)

# Register every 'notify' configuration knob with its default value.
for _name, _default in [
    ('changegroup', None),
    ('config', None),
    ('diffstat', True),
    ('domain', None),
    ('fromauthor', None),
    ('incoming', None),
    ('maxdiff', 300),
    ('maxsubject', 67),
    ('mbox', None),
    ('merge', True),
    ('outgoing', None),
    ('sources', 'serve'),
    ('strip', 0),
    ('style', None),
    ('template', None),
    ('test', True),
]:
    configitem('notify', _name, default=_default)
del _name, _default
210
210
# Template used when notifying about a single changeset; it may carry
# email headers (Subject/From), which are honored for the message.
single_template = '''
Subject: changeset in {webroot}: {desc|firstline|strip}
From: {author}

changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
description:
\t{desc|tabindent|strip}
'''.lstrip()

# Template used when several changesets share one mail. It must not
# carry email headers: only the first set of headers would take effect
# and the rest would render as body text.
multiple_template = '''
changeset {node|short} in {root}
details: {baseurl}{webroot}?cmd=changeset;node={node|short}
summary: {desc|firstline}
'''

# per-hooktype default templates; hooks not listed fall back to
# single_template
deftemplates = {
    'changegroup': multiple_template,
}
234
234
235 class notifier(object):
235 class notifier(object):
236 '''email notification class.'''
236 '''email notification class.'''
237
237
238 def __init__(self, ui, repo, hooktype):
238 def __init__(self, ui, repo, hooktype):
239 self.ui = ui
239 self.ui = ui
240 cfg = self.ui.config('notify', 'config')
240 cfg = self.ui.config('notify', 'config')
241 if cfg:
241 if cfg:
242 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
242 self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
243 self.repo = repo
243 self.repo = repo
244 self.stripcount = int(self.ui.config('notify', 'strip'))
244 self.stripcount = int(self.ui.config('notify', 'strip'))
245 self.root = self.strip(self.repo.root)
245 self.root = self.strip(self.repo.root)
246 self.domain = self.ui.config('notify', 'domain')
246 self.domain = self.ui.config('notify', 'domain')
247 self.mbox = self.ui.config('notify', 'mbox')
247 self.mbox = self.ui.config('notify', 'mbox')
248 self.test = self.ui.configbool('notify', 'test')
248 self.test = self.ui.configbool('notify', 'test')
249 self.charsets = mail._charsets(self.ui)
249 self.charsets = mail._charsets(self.ui)
250 self.subs = self.subscribers()
250 self.subs = self.subscribers()
251 self.merge = self.ui.configbool('notify', 'merge')
251 self.merge = self.ui.configbool('notify', 'merge')
252
252
253 mapfile = None
253 mapfile = None
254 template = (self.ui.config('notify', hooktype) or
254 template = (self.ui.config('notify', hooktype) or
255 self.ui.config('notify', 'template'))
255 self.ui.config('notify', 'template'))
256 if not template:
256 if not template:
257 mapfile = self.ui.config('notify', 'style')
257 mapfile = self.ui.config('notify', 'style')
258 if not mapfile and not template:
258 if not mapfile and not template:
259 template = deftemplates.get(hooktype) or single_template
259 template = deftemplates.get(hooktype) or single_template
260 spec = logcmdutil.templatespec(template, mapfile)
260 spec = logcmdutil.templatespec(template, mapfile)
261 self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec,
261 self.t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
262 False, None, False)
263
262
264 def strip(self, path):
263 def strip(self, path):
265 '''strip leading slashes from local path, turn into web-safe path.'''
264 '''strip leading slashes from local path, turn into web-safe path.'''
266
265
267 path = util.pconvert(path)
266 path = util.pconvert(path)
268 count = self.stripcount
267 count = self.stripcount
269 while count > 0:
268 while count > 0:
270 c = path.find('/')
269 c = path.find('/')
271 if c == -1:
270 if c == -1:
272 break
271 break
273 path = path[c + 1:]
272 path = path[c + 1:]
274 count -= 1
273 count -= 1
275 return path
274 return path
276
275
277 def fixmail(self, addr):
276 def fixmail(self, addr):
278 '''try to clean up email addresses.'''
277 '''try to clean up email addresses.'''
279
278
280 addr = util.email(addr.strip())
279 addr = util.email(addr.strip())
281 if self.domain:
280 if self.domain:
282 a = addr.find('@localhost')
281 a = addr.find('@localhost')
283 if a != -1:
282 if a != -1:
284 addr = addr[:a]
283 addr = addr[:a]
285 if '@' not in addr:
284 if '@' not in addr:
286 return addr + '@' + self.domain
285 return addr + '@' + self.domain
287 return addr
286 return addr
288
287
289 def subscribers(self):
288 def subscribers(self):
290 '''return list of email addresses of subscribers to this repo.'''
289 '''return list of email addresses of subscribers to this repo.'''
291 subs = set()
290 subs = set()
292 for user, pats in self.ui.configitems('usersubs'):
291 for user, pats in self.ui.configitems('usersubs'):
293 for pat in pats.split(','):
292 for pat in pats.split(','):
294 if '#' in pat:
293 if '#' in pat:
295 pat, revs = pat.split('#', 1)
294 pat, revs = pat.split('#', 1)
296 else:
295 else:
297 revs = None
296 revs = None
298 if fnmatch.fnmatch(self.repo.root, pat.strip()):
297 if fnmatch.fnmatch(self.repo.root, pat.strip()):
299 subs.add((self.fixmail(user), revs))
298 subs.add((self.fixmail(user), revs))
300 for pat, users in self.ui.configitems('reposubs'):
299 for pat, users in self.ui.configitems('reposubs'):
301 if '#' in pat:
300 if '#' in pat:
302 pat, revs = pat.split('#', 1)
301 pat, revs = pat.split('#', 1)
303 else:
302 else:
304 revs = None
303 revs = None
305 if fnmatch.fnmatch(self.repo.root, pat):
304 if fnmatch.fnmatch(self.repo.root, pat):
306 for user in users.split(','):
305 for user in users.split(','):
307 subs.add((self.fixmail(user), revs))
306 subs.add((self.fixmail(user), revs))
308 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
307 return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
309 for s, r in sorted(subs)]
308 for s, r in sorted(subs)]
310
309
311 def node(self, ctx, **props):
310 def node(self, ctx, **props):
312 '''format one changeset, unless it is a suppressed merge.'''
311 '''format one changeset, unless it is a suppressed merge.'''
313 if not self.merge and len(ctx.parents()) > 1:
312 if not self.merge and len(ctx.parents()) > 1:
314 return False
313 return False
315 self.t.show(ctx, changes=ctx.changeset(),
314 self.t.show(ctx, changes=ctx.changeset(),
316 baseurl=self.ui.config('web', 'baseurl'),
315 baseurl=self.ui.config('web', 'baseurl'),
317 root=self.repo.root, webroot=self.root, **props)
316 root=self.repo.root, webroot=self.root, **props)
318 return True
317 return True
319
318
320 def skipsource(self, source):
319 def skipsource(self, source):
321 '''true if incoming changes from this source should be skipped.'''
320 '''true if incoming changes from this source should be skipped.'''
322 ok_sources = self.ui.config('notify', 'sources').split()
321 ok_sources = self.ui.config('notify', 'sources').split()
323 return source not in ok_sources
322 return source not in ok_sources
324
323
325 def send(self, ctx, count, data):
324 def send(self, ctx, count, data):
326 '''send message.'''
325 '''send message.'''
327
326
328 # Select subscribers by revset
327 # Select subscribers by revset
329 subs = set()
328 subs = set()
330 for sub, spec in self.subs:
329 for sub, spec in self.subs:
331 if spec is None:
330 if spec is None:
332 subs.add(sub)
331 subs.add(sub)
333 continue
332 continue
334 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
333 revs = self.repo.revs('%r and %d:', spec, ctx.rev())
335 if len(revs):
334 if len(revs):
336 subs.add(sub)
335 subs.add(sub)
337 continue
336 continue
338 if len(subs) == 0:
337 if len(subs) == 0:
339 self.ui.debug('notify: no subscribers to selected repo '
338 self.ui.debug('notify: no subscribers to selected repo '
340 'and revset\n')
339 'and revset\n')
341 return
340 return
342
341
343 p = emailparser.Parser()
342 p = emailparser.Parser()
344 try:
343 try:
345 msg = p.parsestr(data)
344 msg = p.parsestr(data)
346 except email.Errors.MessageParseError as inst:
345 except email.Errors.MessageParseError as inst:
347 raise error.Abort(inst)
346 raise error.Abort(inst)
348
347
349 # store sender and subject
348 # store sender and subject
350 sender, subject = msg['From'], msg['Subject']
349 sender, subject = msg['From'], msg['Subject']
351 del msg['From'], msg['Subject']
350 del msg['From'], msg['Subject']
352
351
353 if not msg.is_multipart():
352 if not msg.is_multipart():
354 # create fresh mime message from scratch
353 # create fresh mime message from scratch
355 # (multipart templates must take care of this themselves)
354 # (multipart templates must take care of this themselves)
356 headers = msg.items()
355 headers = msg.items()
357 payload = msg.get_payload()
356 payload = msg.get_payload()
358 # for notification prefer readability over data precision
357 # for notification prefer readability over data precision
359 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
358 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
360 # reinstate custom headers
359 # reinstate custom headers
361 for k, v in headers:
360 for k, v in headers:
362 msg[k] = v
361 msg[k] = v
363
362
364 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
363 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
365
364
366 # try to make subject line exist and be useful
365 # try to make subject line exist and be useful
367 if not subject:
366 if not subject:
368 if count > 1:
367 if count > 1:
369 subject = _('%s: %d new changesets') % (self.root, count)
368 subject = _('%s: %d new changesets') % (self.root, count)
370 else:
369 else:
371 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
370 s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
372 subject = '%s: %s' % (self.root, s)
371 subject = '%s: %s' % (self.root, s)
373 maxsubject = int(self.ui.config('notify', 'maxsubject'))
372 maxsubject = int(self.ui.config('notify', 'maxsubject'))
374 if maxsubject:
373 if maxsubject:
375 subject = util.ellipsis(subject, maxsubject)
374 subject = util.ellipsis(subject, maxsubject)
376 msg['Subject'] = mail.headencode(self.ui, subject,
375 msg['Subject'] = mail.headencode(self.ui, subject,
377 self.charsets, self.test)
376 self.charsets, self.test)
378
377
379 # try to make message have proper sender
378 # try to make message have proper sender
380 if not sender:
379 if not sender:
381 sender = self.ui.config('email', 'from') or self.ui.username()
380 sender = self.ui.config('email', 'from') or self.ui.username()
382 if '@' not in sender or '@localhost' in sender:
381 if '@' not in sender or '@localhost' in sender:
383 sender = self.fixmail(sender)
382 sender = self.fixmail(sender)
384 msg['From'] = mail.addressencode(self.ui, sender,
383 msg['From'] = mail.addressencode(self.ui, sender,
385 self.charsets, self.test)
384 self.charsets, self.test)
386
385
387 msg['X-Hg-Notification'] = 'changeset %s' % ctx
386 msg['X-Hg-Notification'] = 'changeset %s' % ctx
388 if not msg['Message-Id']:
387 if not msg['Message-Id']:
389 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
388 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
390 (ctx, int(time.time()),
389 (ctx, int(time.time()),
391 hash(self.repo.root), socket.getfqdn()))
390 hash(self.repo.root), socket.getfqdn()))
392 msg['To'] = ', '.join(sorted(subs))
391 msg['To'] = ', '.join(sorted(subs))
393
392
394 msgtext = msg.as_string()
393 msgtext = msg.as_string()
395 if self.test:
394 if self.test:
396 self.ui.write(msgtext)
395 self.ui.write(msgtext)
397 if not msgtext.endswith('\n'):
396 if not msgtext.endswith('\n'):
398 self.ui.write('\n')
397 self.ui.write('\n')
399 else:
398 else:
400 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
399 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
401 (len(subs), count))
400 (len(subs), count))
402 mail.sendmail(self.ui, util.email(msg['From']),
401 mail.sendmail(self.ui, util.email(msg['From']),
403 subs, msgtext, mbox=self.mbox)
402 subs, msgtext, mbox=self.mbox)
404
403
405 def diff(self, ctx, ref=None):
404 def diff(self, ctx, ref=None):
406
405
407 maxdiff = int(self.ui.config('notify', 'maxdiff'))
406 maxdiff = int(self.ui.config('notify', 'maxdiff'))
408 prev = ctx.p1().node()
407 prev = ctx.p1().node()
409 if ref:
408 if ref:
410 ref = ref.node()
409 ref = ref.node()
411 else:
410 else:
412 ref = ctx.node()
411 ref = ctx.node()
413 chunks = patch.diff(self.repo, prev, ref,
412 chunks = patch.diff(self.repo, prev, ref,
414 opts=patch.diffallopts(self.ui))
413 opts=patch.diffallopts(self.ui))
415 difflines = ''.join(chunks).splitlines()
414 difflines = ''.join(chunks).splitlines()
416
415
417 if self.ui.configbool('notify', 'diffstat'):
416 if self.ui.configbool('notify', 'diffstat'):
418 s = patch.diffstat(difflines)
417 s = patch.diffstat(difflines)
419 # s may be nil, don't include the header if it is
418 # s may be nil, don't include the header if it is
420 if s:
419 if s:
421 self.ui.write(_('\ndiffstat:\n\n%s') % s)
420 self.ui.write(_('\ndiffstat:\n\n%s') % s)
422
421
423 if maxdiff == 0:
422 if maxdiff == 0:
424 return
423 return
425 elif maxdiff > 0 and len(difflines) > maxdiff:
424 elif maxdiff > 0 and len(difflines) > maxdiff:
426 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
425 msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
427 self.ui.write(msg % (len(difflines), maxdiff))
426 self.ui.write(msg % (len(difflines), maxdiff))
428 difflines = difflines[:maxdiff]
427 difflines = difflines[:maxdiff]
429 elif difflines:
428 elif difflines:
430 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
429 self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
431
430
432 self.ui.write("\n".join(difflines))
431 self.ui.write("\n".join(difflines))
433
432
434 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
433 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
435 '''send email notifications to interested subscribers.
434 '''send email notifications to interested subscribers.
436
435
437 if used as changegroup hook, send one email for all changesets in
436 if used as changegroup hook, send one email for all changesets in
438 changegroup. else send one email per changeset.'''
437 changegroup. else send one email per changeset.'''
439
438
440 n = notifier(ui, repo, hooktype)
439 n = notifier(ui, repo, hooktype)
441 ctx = repo[node]
440 ctx = repo[node]
442
441
443 if not n.subs:
442 if not n.subs:
444 ui.debug('notify: no subscribers to repository %s\n' % n.root)
443 ui.debug('notify: no subscribers to repository %s\n' % n.root)
445 return
444 return
446 if n.skipsource(source):
445 if n.skipsource(source):
447 ui.debug('notify: changes have source "%s" - skipping\n' % source)
446 ui.debug('notify: changes have source "%s" - skipping\n' % source)
448 return
447 return
449
448
450 ui.pushbuffer()
449 ui.pushbuffer()
451 data = ''
450 data = ''
452 count = 0
451 count = 0
453 author = ''
452 author = ''
454 if hooktype == 'changegroup' or hooktype == 'outgoing':
453 if hooktype == 'changegroup' or hooktype == 'outgoing':
455 start, end = ctx.rev(), len(repo)
454 start, end = ctx.rev(), len(repo)
456 for rev in xrange(start, end):
455 for rev in xrange(start, end):
457 if n.node(repo[rev]):
456 if n.node(repo[rev]):
458 count += 1
457 count += 1
459 if not author:
458 if not author:
460 author = repo[rev].user()
459 author = repo[rev].user()
461 else:
460 else:
462 data += ui.popbuffer()
461 data += ui.popbuffer()
463 ui.note(_('notify: suppressing notification for merge %d:%s\n')
462 ui.note(_('notify: suppressing notification for merge %d:%s\n')
464 % (rev, repo[rev].hex()[:12]))
463 % (rev, repo[rev].hex()[:12]))
465 ui.pushbuffer()
464 ui.pushbuffer()
466 if count:
465 if count:
467 n.diff(ctx, repo['tip'])
466 n.diff(ctx, repo['tip'])
468 else:
467 else:
469 if not n.node(ctx):
468 if not n.node(ctx):
470 ui.popbuffer()
469 ui.popbuffer()
471 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
470 ui.note(_('notify: suppressing notification for merge %d:%s\n') %
472 (ctx.rev(), ctx.hex()[:12]))
471 (ctx.rev(), ctx.hex()[:12]))
473 return
472 return
474 count += 1
473 count += 1
475 n.diff(ctx)
474 n.diff(ctx)
476 if not author:
475 if not author:
477 author = ctx.user()
476 author = ctx.user()
478
477
479 data += ui.popbuffer()
478 data += ui.popbuffer()
480 fromauthor = ui.config('notify', 'fromauthor')
479 fromauthor = ui.config('notify', 'fromauthor')
481 if author and fromauthor:
480 if author and fromauthor:
482 data = '\n'.join(['From: %s' % author, data])
481 data = '\n'.join(['From: %s' % author, data])
483
482
484 if count:
483 if count:
485 n.send(ctx, count, data)
484 n.send(ctx, count, data)
@@ -1,3164 +1,3164 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 bookmarks,
24 bookmarks,
25 changelog,
25 changelog,
26 copies,
26 copies,
27 crecord as crecordmod,
27 crecord as crecordmod,
28 dirstateguard,
28 dirstateguard,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 logcmdutil,
32 logcmdutil,
33 match as matchmod,
33 match as matchmod,
34 obsolete,
34 obsolete,
35 patch,
35 patch,
36 pathutil,
36 pathutil,
37 pycompat,
37 pycompat,
38 registrar,
38 registrar,
39 revlog,
39 revlog,
40 rewriteutil,
40 rewriteutil,
41 scmutil,
41 scmutil,
42 smartset,
42 smartset,
43 templater,
43 templater,
44 util,
44 util,
45 vfs as vfsmod,
45 vfs as vfsmod,
46 )
46 )
47 stringio = util.stringio
47 stringio = util.stringio
48
48
49 # templates of common command options
49 # templates of common command options
50
50
51 dryrunopts = [
51 dryrunopts = [
52 ('n', 'dry-run', None,
52 ('n', 'dry-run', None,
53 _('do not perform actions, just print output')),
53 _('do not perform actions, just print output')),
54 ]
54 ]
55
55
56 remoteopts = [
56 remoteopts = [
57 ('e', 'ssh', '',
57 ('e', 'ssh', '',
58 _('specify ssh command to use'), _('CMD')),
58 _('specify ssh command to use'), _('CMD')),
59 ('', 'remotecmd', '',
59 ('', 'remotecmd', '',
60 _('specify hg command to run on the remote side'), _('CMD')),
60 _('specify hg command to run on the remote side'), _('CMD')),
61 ('', 'insecure', None,
61 ('', 'insecure', None,
62 _('do not verify server certificate (ignoring web.cacerts config)')),
62 _('do not verify server certificate (ignoring web.cacerts config)')),
63 ]
63 ]
64
64
65 walkopts = [
65 walkopts = [
66 ('I', 'include', [],
66 ('I', 'include', [],
67 _('include names matching the given patterns'), _('PATTERN')),
67 _('include names matching the given patterns'), _('PATTERN')),
68 ('X', 'exclude', [],
68 ('X', 'exclude', [],
69 _('exclude names matching the given patterns'), _('PATTERN')),
69 _('exclude names matching the given patterns'), _('PATTERN')),
70 ]
70 ]
71
71
72 commitopts = [
72 commitopts = [
73 ('m', 'message', '',
73 ('m', 'message', '',
74 _('use text as commit message'), _('TEXT')),
74 _('use text as commit message'), _('TEXT')),
75 ('l', 'logfile', '',
75 ('l', 'logfile', '',
76 _('read commit message from file'), _('FILE')),
76 _('read commit message from file'), _('FILE')),
77 ]
77 ]
78
78
79 commitopts2 = [
79 commitopts2 = [
80 ('d', 'date', '',
80 ('d', 'date', '',
81 _('record the specified date as commit date'), _('DATE')),
81 _('record the specified date as commit date'), _('DATE')),
82 ('u', 'user', '',
82 ('u', 'user', '',
83 _('record the specified user as committer'), _('USER')),
83 _('record the specified user as committer'), _('USER')),
84 ]
84 ]
85
85
86 # hidden for now
86 # hidden for now
87 formatteropts = [
87 formatteropts = [
88 ('T', 'template', '',
88 ('T', 'template', '',
89 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
89 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
90 ]
90 ]
91
91
92 templateopts = [
92 templateopts = [
93 ('', 'style', '',
93 ('', 'style', '',
94 _('display using template map file (DEPRECATED)'), _('STYLE')),
94 _('display using template map file (DEPRECATED)'), _('STYLE')),
95 ('T', 'template', '',
95 ('T', 'template', '',
96 _('display with template'), _('TEMPLATE')),
96 _('display with template'), _('TEMPLATE')),
97 ]
97 ]
98
98
99 logopts = [
99 logopts = [
100 ('p', 'patch', None, _('show patch')),
100 ('p', 'patch', None, _('show patch')),
101 ('g', 'git', None, _('use git extended diff format')),
101 ('g', 'git', None, _('use git extended diff format')),
102 ('l', 'limit', '',
102 ('l', 'limit', '',
103 _('limit number of changes displayed'), _('NUM')),
103 _('limit number of changes displayed'), _('NUM')),
104 ('M', 'no-merges', None, _('do not show merges')),
104 ('M', 'no-merges', None, _('do not show merges')),
105 ('', 'stat', None, _('output diffstat-style summary of changes')),
105 ('', 'stat', None, _('output diffstat-style summary of changes')),
106 ('G', 'graph', None, _("show the revision DAG")),
106 ('G', 'graph', None, _("show the revision DAG")),
107 ] + templateopts
107 ] + templateopts
108
108
109 diffopts = [
109 diffopts = [
110 ('a', 'text', None, _('treat all files as text')),
110 ('a', 'text', None, _('treat all files as text')),
111 ('g', 'git', None, _('use git extended diff format')),
111 ('g', 'git', None, _('use git extended diff format')),
112 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
112 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
113 ('', 'nodates', None, _('omit dates from diff headers'))
113 ('', 'nodates', None, _('omit dates from diff headers'))
114 ]
114 ]
115
115
116 diffwsopts = [
116 diffwsopts = [
117 ('w', 'ignore-all-space', None,
117 ('w', 'ignore-all-space', None,
118 _('ignore white space when comparing lines')),
118 _('ignore white space when comparing lines')),
119 ('b', 'ignore-space-change', None,
119 ('b', 'ignore-space-change', None,
120 _('ignore changes in the amount of white space')),
120 _('ignore changes in the amount of white space')),
121 ('B', 'ignore-blank-lines', None,
121 ('B', 'ignore-blank-lines', None,
122 _('ignore changes whose lines are all blank')),
122 _('ignore changes whose lines are all blank')),
123 ('Z', 'ignore-space-at-eol', None,
123 ('Z', 'ignore-space-at-eol', None,
124 _('ignore changes in whitespace at EOL')),
124 _('ignore changes in whitespace at EOL')),
125 ]
125 ]
126
126
127 diffopts2 = [
127 diffopts2 = [
128 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
128 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
129 ('p', 'show-function', None, _('show which function each change is in')),
129 ('p', 'show-function', None, _('show which function each change is in')),
130 ('', 'reverse', None, _('produce a diff that undoes the changes')),
130 ('', 'reverse', None, _('produce a diff that undoes the changes')),
131 ] + diffwsopts + [
131 ] + diffwsopts + [
132 ('U', 'unified', '',
132 ('U', 'unified', '',
133 _('number of lines of context to show'), _('NUM')),
133 _('number of lines of context to show'), _('NUM')),
134 ('', 'stat', None, _('output diffstat-style summary of changes')),
134 ('', 'stat', None, _('output diffstat-style summary of changes')),
135 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
135 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
136 ]
136 ]
137
137
138 mergetoolopts = [
138 mergetoolopts = [
139 ('t', 'tool', '', _('specify merge tool')),
139 ('t', 'tool', '', _('specify merge tool')),
140 ]
140 ]
141
141
142 similarityopts = [
142 similarityopts = [
143 ('s', 'similarity', '',
143 ('s', 'similarity', '',
144 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
144 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
145 ]
145 ]
146
146
147 subrepoopts = [
147 subrepoopts = [
148 ('S', 'subrepos', None,
148 ('S', 'subrepos', None,
149 _('recurse into subrepositories'))
149 _('recurse into subrepositories'))
150 ]
150 ]
151
151
152 debugrevlogopts = [
152 debugrevlogopts = [
153 ('c', 'changelog', False, _('open changelog')),
153 ('c', 'changelog', False, _('open changelog')),
154 ('m', 'manifest', False, _('open manifest')),
154 ('m', 'manifest', False, _('open manifest')),
155 ('', 'dir', '', _('open directory manifest')),
155 ('', 'dir', '', _('open directory manifest')),
156 ]
156 ]
157
157
158 # special string such that everything below this line will be ingored in the
158 # special string such that everything below this line will be ingored in the
159 # editor text
159 # editor text
160 _linebelow = "^HG: ------------------------ >8 ------------------------$"
160 _linebelow = "^HG: ------------------------ >8 ------------------------$"
161
161
162 def ishunk(x):
162 def ishunk(x):
163 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
163 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
164 return isinstance(x, hunkclasses)
164 return isinstance(x, hunkclasses)
165
165
166 def newandmodified(chunks, originalchunks):
166 def newandmodified(chunks, originalchunks):
167 newlyaddedandmodifiedfiles = set()
167 newlyaddedandmodifiedfiles = set()
168 for chunk in chunks:
168 for chunk in chunks:
169 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
169 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
170 originalchunks:
170 originalchunks:
171 newlyaddedandmodifiedfiles.add(chunk.header.filename())
171 newlyaddedandmodifiedfiles.add(chunk.header.filename())
172 return newlyaddedandmodifiedfiles
172 return newlyaddedandmodifiedfiles
173
173
174 def parsealiases(cmd):
174 def parsealiases(cmd):
175 return cmd.lstrip("^").split("|")
175 return cmd.lstrip("^").split("|")
176
176
177 def setupwrapcolorwrite(ui):
177 def setupwrapcolorwrite(ui):
178 # wrap ui.write so diff output can be labeled/colorized
178 # wrap ui.write so diff output can be labeled/colorized
179 def wrapwrite(orig, *args, **kw):
179 def wrapwrite(orig, *args, **kw):
180 label = kw.pop(r'label', '')
180 label = kw.pop(r'label', '')
181 for chunk, l in patch.difflabel(lambda: args):
181 for chunk, l in patch.difflabel(lambda: args):
182 orig(chunk, label=label + l)
182 orig(chunk, label=label + l)
183
183
184 oldwrite = ui.write
184 oldwrite = ui.write
185 def wrap(*args, **kwargs):
185 def wrap(*args, **kwargs):
186 return wrapwrite(oldwrite, *args, **kwargs)
186 return wrapwrite(oldwrite, *args, **kwargs)
187 setattr(ui, 'write', wrap)
187 setattr(ui, 'write', wrap)
188 return oldwrite
188 return oldwrite
189
189
190 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
190 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
191 if usecurses:
191 if usecurses:
192 if testfile:
192 if testfile:
193 recordfn = crecordmod.testdecorator(testfile,
193 recordfn = crecordmod.testdecorator(testfile,
194 crecordmod.testchunkselector)
194 crecordmod.testchunkselector)
195 else:
195 else:
196 recordfn = crecordmod.chunkselector
196 recordfn = crecordmod.chunkselector
197
197
198 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
198 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
199
199
200 else:
200 else:
201 return patch.filterpatch(ui, originalhunks, operation)
201 return patch.filterpatch(ui, originalhunks, operation)
202
202
203 def recordfilter(ui, originalhunks, operation=None):
203 def recordfilter(ui, originalhunks, operation=None):
204 """ Prompts the user to filter the originalhunks and return a list of
204 """ Prompts the user to filter the originalhunks and return a list of
205 selected hunks.
205 selected hunks.
206 *operation* is used for to build ui messages to indicate the user what
206 *operation* is used for to build ui messages to indicate the user what
207 kind of filtering they are doing: reverting, committing, shelving, etc.
207 kind of filtering they are doing: reverting, committing, shelving, etc.
208 (see patch.filterpatch).
208 (see patch.filterpatch).
209 """
209 """
210 usecurses = crecordmod.checkcurses(ui)
210 usecurses = crecordmod.checkcurses(ui)
211 testfile = ui.config('experimental', 'crecordtest')
211 testfile = ui.config('experimental', 'crecordtest')
212 oldwrite = setupwrapcolorwrite(ui)
212 oldwrite = setupwrapcolorwrite(ui)
213 try:
213 try:
214 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
214 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
215 testfile, operation)
215 testfile, operation)
216 finally:
216 finally:
217 ui.write = oldwrite
217 ui.write = oldwrite
218 return newchunks, newopts
218 return newchunks, newopts
219
219
220 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
220 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
221 filterfn, *pats, **opts):
221 filterfn, *pats, **opts):
222 from . import merge as mergemod
222 from . import merge as mergemod
223 opts = pycompat.byteskwargs(opts)
223 opts = pycompat.byteskwargs(opts)
224 if not ui.interactive():
224 if not ui.interactive():
225 if cmdsuggest:
225 if cmdsuggest:
226 msg = _('running non-interactively, use %s instead') % cmdsuggest
226 msg = _('running non-interactively, use %s instead') % cmdsuggest
227 else:
227 else:
228 msg = _('running non-interactively')
228 msg = _('running non-interactively')
229 raise error.Abort(msg)
229 raise error.Abort(msg)
230
230
231 # make sure username is set before going interactive
231 # make sure username is set before going interactive
232 if not opts.get('user'):
232 if not opts.get('user'):
233 ui.username() # raise exception, username not provided
233 ui.username() # raise exception, username not provided
234
234
235 def recordfunc(ui, repo, message, match, opts):
235 def recordfunc(ui, repo, message, match, opts):
236 """This is generic record driver.
236 """This is generic record driver.
237
237
238 Its job is to interactively filter local changes, and
238 Its job is to interactively filter local changes, and
239 accordingly prepare working directory into a state in which the
239 accordingly prepare working directory into a state in which the
240 job can be delegated to a non-interactive commit command such as
240 job can be delegated to a non-interactive commit command such as
241 'commit' or 'qrefresh'.
241 'commit' or 'qrefresh'.
242
242
243 After the actual job is done by non-interactive command, the
243 After the actual job is done by non-interactive command, the
244 working directory is restored to its original state.
244 working directory is restored to its original state.
245
245
246 In the end we'll record interesting changes, and everything else
246 In the end we'll record interesting changes, and everything else
247 will be left in place, so the user can continue working.
247 will be left in place, so the user can continue working.
248 """
248 """
249
249
250 checkunfinished(repo, commit=True)
250 checkunfinished(repo, commit=True)
251 wctx = repo[None]
251 wctx = repo[None]
252 merge = len(wctx.parents()) > 1
252 merge = len(wctx.parents()) > 1
253 if merge:
253 if merge:
254 raise error.Abort(_('cannot partially commit a merge '
254 raise error.Abort(_('cannot partially commit a merge '
255 '(use "hg commit" instead)'))
255 '(use "hg commit" instead)'))
256
256
257 def fail(f, msg):
257 def fail(f, msg):
258 raise error.Abort('%s: %s' % (f, msg))
258 raise error.Abort('%s: %s' % (f, msg))
259
259
260 force = opts.get('force')
260 force = opts.get('force')
261 if not force:
261 if not force:
262 vdirs = []
262 vdirs = []
263 match.explicitdir = vdirs.append
263 match.explicitdir = vdirs.append
264 match.bad = fail
264 match.bad = fail
265
265
266 status = repo.status(match=match)
266 status = repo.status(match=match)
267 if not force:
267 if not force:
268 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
268 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
269 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
269 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
270 diffopts.nodates = True
270 diffopts.nodates = True
271 diffopts.git = True
271 diffopts.git = True
272 diffopts.showfunc = True
272 diffopts.showfunc = True
273 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
273 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
274 originalchunks = patch.parsepatch(originaldiff)
274 originalchunks = patch.parsepatch(originaldiff)
275
275
276 # 1. filter patch, since we are intending to apply subset of it
276 # 1. filter patch, since we are intending to apply subset of it
277 try:
277 try:
278 chunks, newopts = filterfn(ui, originalchunks)
278 chunks, newopts = filterfn(ui, originalchunks)
279 except error.PatchError as err:
279 except error.PatchError as err:
280 raise error.Abort(_('error parsing patch: %s') % err)
280 raise error.Abort(_('error parsing patch: %s') % err)
281 opts.update(newopts)
281 opts.update(newopts)
282
282
283 # We need to keep a backup of files that have been newly added and
283 # We need to keep a backup of files that have been newly added and
284 # modified during the recording process because there is a previous
284 # modified during the recording process because there is a previous
285 # version without the edit in the workdir
285 # version without the edit in the workdir
286 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
286 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
287 contenders = set()
287 contenders = set()
288 for h in chunks:
288 for h in chunks:
289 try:
289 try:
290 contenders.update(set(h.files()))
290 contenders.update(set(h.files()))
291 except AttributeError:
291 except AttributeError:
292 pass
292 pass
293
293
294 changed = status.modified + status.added + status.removed
294 changed = status.modified + status.added + status.removed
295 newfiles = [f for f in changed if f in contenders]
295 newfiles = [f for f in changed if f in contenders]
296 if not newfiles:
296 if not newfiles:
297 ui.status(_('no changes to record\n'))
297 ui.status(_('no changes to record\n'))
298 return 0
298 return 0
299
299
300 modified = set(status.modified)
300 modified = set(status.modified)
301
301
302 # 2. backup changed files, so we can restore them in the end
302 # 2. backup changed files, so we can restore them in the end
303
303
304 if backupall:
304 if backupall:
305 tobackup = changed
305 tobackup = changed
306 else:
306 else:
307 tobackup = [f for f in newfiles if f in modified or f in \
307 tobackup = [f for f in newfiles if f in modified or f in \
308 newlyaddedandmodifiedfiles]
308 newlyaddedandmodifiedfiles]
309 backups = {}
309 backups = {}
310 if tobackup:
310 if tobackup:
311 backupdir = repo.vfs.join('record-backups')
311 backupdir = repo.vfs.join('record-backups')
312 try:
312 try:
313 os.mkdir(backupdir)
313 os.mkdir(backupdir)
314 except OSError as err:
314 except OSError as err:
315 if err.errno != errno.EEXIST:
315 if err.errno != errno.EEXIST:
316 raise
316 raise
317 try:
317 try:
318 # backup continues
318 # backup continues
319 for f in tobackup:
319 for f in tobackup:
320 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
320 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
321 dir=backupdir)
321 dir=backupdir)
322 os.close(fd)
322 os.close(fd)
323 ui.debug('backup %r as %r\n' % (f, tmpname))
323 ui.debug('backup %r as %r\n' % (f, tmpname))
324 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
324 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
325 backups[f] = tmpname
325 backups[f] = tmpname
326
326
327 fp = stringio()
327 fp = stringio()
328 for c in chunks:
328 for c in chunks:
329 fname = c.filename()
329 fname = c.filename()
330 if fname in backups:
330 if fname in backups:
331 c.write(fp)
331 c.write(fp)
332 dopatch = fp.tell()
332 dopatch = fp.tell()
333 fp.seek(0)
333 fp.seek(0)
334
334
335 # 2.5 optionally review / modify patch in text editor
335 # 2.5 optionally review / modify patch in text editor
336 if opts.get('review', False):
336 if opts.get('review', False):
337 patchtext = (crecordmod.diffhelptext
337 patchtext = (crecordmod.diffhelptext
338 + crecordmod.patchhelptext
338 + crecordmod.patchhelptext
339 + fp.read())
339 + fp.read())
340 reviewedpatch = ui.edit(patchtext, "",
340 reviewedpatch = ui.edit(patchtext, "",
341 action="diff",
341 action="diff",
342 repopath=repo.path)
342 repopath=repo.path)
343 fp.truncate(0)
343 fp.truncate(0)
344 fp.write(reviewedpatch)
344 fp.write(reviewedpatch)
345 fp.seek(0)
345 fp.seek(0)
346
346
347 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
347 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
348 # 3a. apply filtered patch to clean repo (clean)
348 # 3a. apply filtered patch to clean repo (clean)
349 if backups:
349 if backups:
350 # Equivalent to hg.revert
350 # Equivalent to hg.revert
351 m = scmutil.matchfiles(repo, backups.keys())
351 m = scmutil.matchfiles(repo, backups.keys())
352 mergemod.update(repo, repo.dirstate.p1(),
352 mergemod.update(repo, repo.dirstate.p1(),
353 False, True, matcher=m)
353 False, True, matcher=m)
354
354
355 # 3b. (apply)
355 # 3b. (apply)
356 if dopatch:
356 if dopatch:
357 try:
357 try:
358 ui.debug('applying patch\n')
358 ui.debug('applying patch\n')
359 ui.debug(fp.getvalue())
359 ui.debug(fp.getvalue())
360 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
360 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
361 except error.PatchError as err:
361 except error.PatchError as err:
362 raise error.Abort(str(err))
362 raise error.Abort(str(err))
363 del fp
363 del fp
364
364
365 # 4. We prepared working directory according to filtered
365 # 4. We prepared working directory according to filtered
366 # patch. Now is the time to delegate the job to
366 # patch. Now is the time to delegate the job to
367 # commit/qrefresh or the like!
367 # commit/qrefresh or the like!
368
368
369 # Make all of the pathnames absolute.
369 # Make all of the pathnames absolute.
370 newfiles = [repo.wjoin(nf) for nf in newfiles]
370 newfiles = [repo.wjoin(nf) for nf in newfiles]
371 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
371 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
372 finally:
372 finally:
373 # 5. finally restore backed-up files
373 # 5. finally restore backed-up files
374 try:
374 try:
375 dirstate = repo.dirstate
375 dirstate = repo.dirstate
376 for realname, tmpname in backups.iteritems():
376 for realname, tmpname in backups.iteritems():
377 ui.debug('restoring %r to %r\n' % (tmpname, realname))
377 ui.debug('restoring %r to %r\n' % (tmpname, realname))
378
378
379 if dirstate[realname] == 'n':
379 if dirstate[realname] == 'n':
380 # without normallookup, restoring timestamp
380 # without normallookup, restoring timestamp
381 # may cause partially committed files
381 # may cause partially committed files
382 # to be treated as unmodified
382 # to be treated as unmodified
383 dirstate.normallookup(realname)
383 dirstate.normallookup(realname)
384
384
385 # copystat=True here and above are a hack to trick any
385 # copystat=True here and above are a hack to trick any
386 # editors that have f open that we haven't modified them.
386 # editors that have f open that we haven't modified them.
387 #
387 #
388 # Also note that this racy as an editor could notice the
388 # Also note that this racy as an editor could notice the
389 # file's mtime before we've finished writing it.
389 # file's mtime before we've finished writing it.
390 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
390 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
391 os.unlink(tmpname)
391 os.unlink(tmpname)
392 if tobackup:
392 if tobackup:
393 os.rmdir(backupdir)
393 os.rmdir(backupdir)
394 except OSError:
394 except OSError:
395 pass
395 pass
396
396
397 def recordinwlock(ui, repo, message, match, opts):
397 def recordinwlock(ui, repo, message, match, opts):
398 with repo.wlock():
398 with repo.wlock():
399 return recordfunc(ui, repo, message, match, opts)
399 return recordfunc(ui, repo, message, match, opts)
400
400
401 return commit(ui, repo, recordinwlock, pats, opts)
401 return commit(ui, repo, recordinwlock, pats, opts)
402
402
class dirnode(object):
    """A directory of the user's working copy, annotated for status tersing.

    Attributes:
        path: path of this directory (relative, '' for the root)
        statuses: set of one-letter status codes seen on any file below
            this directory, files in subdirectories included
        files: (filename, status) pairs for direct children only
        subdirs: maps a child directory name to its own dirnode
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record a file as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Register a file living somewhere below this directory.

        While the filename still contains a path separator we recurse into
        (creating on demand) the matching subdirectory dirnode; otherwise
        the file is recorded as a direct child. Either way the status is
        remembered on this node, so every ancestor knows the full set of
        statuses occurring underneath it.
        """
        if '/' in filename:
            dirname, remainder = filename.split('/', 1)

            # lazily create the dirnode for the child directory
            if dirname not in self.subdirs:
                self.subdirs[dirname] = dirnode(
                    os.path.join(self.path, dirname))

            # delegate the rest of the path to the child
            self.subdirs[dirname].addfile(remainder, status)
        else:
            self._addfileindir(filename, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) pairs for this subtree, tersed if possible.

        terseargs is the string of status abbreviations the user passed to
        the `--terse` flag.

        When every file below this directory (subdirectories included)
        shares one status and that status was requested in terseargs, the
        whole subtree collapses to a single (status, dirpath) entry.
        Otherwise the direct children are yielded individually and each
        subdirectory is walked recursively.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when the lone status was requested by the user
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # direct children of this directory first ...
        for entry in self.iterfilepaths():
            yield entry

        # ... then everything from each subdirectory
        for child in self.subdirs.values():
            for entry in child.tersewalk(terseargs):
                yield entry
502
502
def tersedir(statuslist, terseargs):
    """Collapse statuses of whole directories where possible.

    statuslist is a scmutil.status() object holding one list of files per
    status; terseargs is the raw string the user handed to the `--terse`
    flag.

    A tree of dirnode objects is built covering the whole working copy;
    every node records each status occurring beneath it, which is enough
    to decide whether a directory can be reported as a single entry.
    Returns one sorted list of paths per status, in canonical order.
    """
    # ordering is significant: it defines the layout of the result
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject unknown status abbreviations up front
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # the dirnode for the repository root
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    # feed every file into the tree, keyed by its one-letter status
    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # the root directory itself is never tersed; list its files directly
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk each top-level subdirectory, tersing where allowed
    for subdir in rootobj.subdirs.values():
        for st, fpath in subdir.tersewalk(terseargs):
            tersedict[st].append(fpath)

    # emit one sorted list per status, in the canonical order
    for st in allst:
        tersedict[st].sort()
    return [tersedict[st] for st in allst]
551
551
552 def _commentlines(raw):
552 def _commentlines(raw):
553 '''Surround lineswith a comment char and a new line'''
553 '''Surround lineswith a comment char and a new line'''
554 lines = raw.splitlines()
554 lines = raw.splitlines()
555 commentedlines = ['# %s' % line for line in lines]
555 commentedlines = ['# %s' % line for line in lines]
556 return '\n'.join(commentedlines) + '\n'
556 return '\n'.join(commentedlines) + '\n'
557
557
def _conflictsmsg(repo):
    """Return commented text describing unresolved merge conflicts.

    Returns None when no merge is in progress.
    """
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    matcher = scmutil.match(repo[None])
    unresolved = [f for f in mergestate.unresolved() if matcher(f)]
    if not unresolved:
        msg = _('No unresolved merge conflicts.')
    else:
        # one indented, cwd-relative path per conflicting file
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolved])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr

    return _commentlines(msg)
580
580
def _helpmessage(continuecmd, abortcmd):
    """Build the commented continue/abort hint for an unfinished state."""
    return _commentlines(_('To continue: %s\n'
                           'To abort: %s') % (continuecmd, abortcmd))
585
585
def _rebasemsg():
    """Continue/abort hint shown for an interrupted 'hg rebase'."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
588
588
def _histeditmsg():
    """Continue/abort hint shown for an interrupted 'hg histedit'."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
591
591
def _unshelvemsg():
    """Continue/abort hint shown for an interrupted 'hg unshelve'."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
594
594
def _updatecleanmsg(dest=None):
    """Return the 'hg update --clean' command for *dest* ('.' by default),
    annotated with a data-loss warning."""
    target = dest or '.'
    return 'hg update --clean %s (%s)' % (
        target, _('warning: this will discard uncommitted changes'))
598
598
def _graftmsg():
    """Continue/abort hint shown for an interrupted 'hg graft'."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
602
602
def _mergemsg():
    """Continue/abort hint shown while a merge awaits a commit."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
606
606
def _bisectmsg():
    """Hint listing the possible actions during an 'hg bisect'."""
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)
612
612
def fileexistspredicate(filename):
    """Return a predicate testing whether *filename* exists in repo.vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
615
615
616 def _mergepredicate(repo):
616 def _mergepredicate(repo):
617 return len(repo[None].parents()) > 1
617 return len(repo[None].parents()) > 1
618
618
# Known unfinished-operation states, checked in order by _getrepostate().
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
632
632
def _getrepostate(repo):
    """Return the (state, predicate, msgfn) tuple for the first unfinished
    state detected in *repo*, or None when nothing is in progress."""
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for statedata in STATES:
        state, statedetectionpredicate, msgfn = statedata
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
641
641
def morestatus(repo, fm):
    """Write extra status output about any unfinished operation to the
    formatter *fm*; no output when the repository is in a normal state."""
    statetuple = _getrepostate(repo)
    if not statetuple:
        return

    label = 'status.morestatus'
    state, statedetectionpredicate, helpfulmsg = statetuple
    fm.startitem()
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
    # list conflicting files, if any merge is active
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.write('conflictsmsg', '%s\n', conmsg, label=label)
    # state-specific continue/abort guidance
    if helpfulmsg:
        fm.write('helpmsg', '%s\n', helpfulmsg(), label=label)
656
656
def findpossible(cmd, table, strict=False):
    """Map each command matching *cmd* to its (aliases, table entry) pair.

    Unless *strict* is set, an alias merely starting with *cmd* counts as
    a match. Debug commands (and their aliases) are reported only when no
    regular command matched. Also returns the flat list of every alias
    seen, for error reporting.
    """
    # an exact table key short-circuits, "log" alias beats "^log|history"
    if cmd in table:
        keys = [cmd]
    else:
        keys = table.keys()

    choice = {}
    debugchoice = {}
    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        if cmd in aliases:
            found = cmd
        elif strict:
            found = None
        else:
            found = next((a for a in aliases if a.startswith(cmd)), None)
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
694
694
def findcmd(cmd, table, strict=True):
    """Resolve *cmd* to its (aliases, command table entry) pair.

    Raises error.AmbiguousCommand when several commands match and
    error.UnknownCommand when none do.
    """
    matches, allcmds = findpossible(cmd, table, strict)

    # an exact hit always wins
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches))

    if matches:
        return list(matches.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
710
710
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Each changeset in revs (which must be a linear run of topological
    heads) is rewritten as a new commit on branch *label*; obsmarkers map
    the old nodes to the new ones, and bookmarks and the working copy are
    moved along. Raises error.Abort for non-linear, merge, obsolete or
    mid-stack revisions, or when *label* would collide with an existing
    branch.
    """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        # more than one root means the selected revisions are not linear
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        # reusing an existing branch name is only allowed when we stay on
        # that branch, i.e. the parent of the root already carries it
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        # maps old node -> (new node,)
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                # fetch the file content from the original changeset;
                # None signals the path is absent there
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            # record the original node in the new commit's extra
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            # in-memory rewrite of the changeset with the new branch
            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            # keep the original phase on the rewritten commit
            commitphase = ctx.phase()
            overrides = {('phases', 'new-commit'): commitphase}
            with repo.ui.configoverride(overrides, 'branch-change'):
                newnode = repo.commitctx(mc)

            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change')

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
801
801
def findrepo(p):
    """Walk upward from directory 'p' looking for a repository root.

    Returns the first ancestor (including 'p' itself) that contains a
    '.hg' directory, or None when the filesystem root is reached without
    finding one.
    """
    while True:
        if os.path.isdir(os.path.join(p, ".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point: we hit the filesystem root
            return None
        p = parent
809
809
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # only the first four status fields (modified/added/removed/deleted)
    # make the working directory "dirty" for our purposes
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # recurse into subrepositories, in deterministic order
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
827
827
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        # either -m was given, or neither option was: return message as-is
        return message
    # no -m: pull the message from the logfile ('-' means stdin)
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, encoding.strtolocal(inst.strerror)))
846
846
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
863
863
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if not (edit or finishdesc or extramsg):
        # no forced editing requested; fall back to the plain editor,
        # binding editform only when one was supplied
        if editform:
            return lambda r, c, s: commiteditor(r, c, s, editform=editform)
        return commiteditor

    def editor(r, c, s):
        return commitforceeditor(r, c, s,
                                 finishdesc=finishdesc,
                                 extramsg=extramsg,
                                 editform=editform)
    return editor
894
894
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand '%' escapes in an output-filename pattern.

    Supported escapes: '%%' literal percent, '%b' basename of repo root;
    when 'node' is given: '%H' full hex node, '%h' short hex node, '%R'
    revision number, '%r' revision zero-padded to 'revwidth', '%m'
    sanitized description; '%N' total, '%n' (zero-padded) seqno when
    given; '%s'/'%d'/'%p' basename/dirname/full pathname when 'pathname'
    is given.

    Raises error.Abort for an escape that is unknown or not enabled by
    the supplied arguments.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        # keep only word characters from the description
        # (raw string: '\w' is not a valid str escape)
        'm': lambda: re.sub(r'[^\w]', '_', desc or '')
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        # the two 'if node:' guards were redundant; enable all
        # node-dependent escapes in one place
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
940
940
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # an empty/missing pattern or the conventional '-' means stdio
    if pat:
        return pat == '-'
    return True
944
944
945 class _unclosablefile(object):
945 class _unclosablefile(object):
946 def __init__(self, fp):
946 def __init__(self, fp):
947 self._fp = fp
947 self._fp = fp
948
948
949 def close(self):
949 def close(self):
950 pass
950 pass
951
951
952 def __iter__(self):
952 def __iter__(self):
953 return iter(self._fp)
953 return iter(self._fp)
954
954
955 def __getattr__(self, attr):
955 def __getattr__(self, attr):
956 return getattr(self._fp, attr)
956 return getattr(self._fp, attr)
957
957
958 def __enter__(self):
958 def __enter__(self):
959 return self
959 return self
960
960
961 def __exit__(self, exc_type, exc_value, exc_tb):
961 def __exit__(self, exc_type, exc_value, exc_tb):
962 pass
962 pass
963
963
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the file described by pattern 'pat' (see makefilename).

    An empty pattern or '-' yields the ui's stdin/stdout wrapped so that
    close() is a no-op.  'modemap', when given, maps expanded filenames
    to open modes and is updated so that the first 'wb' open of a name
    is followed by 'ab' opens (append on subsequent writes).
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first open truncates; remember to append next time
            modemap[fn] = 'ab'
    return open(fn, mode)
982
982
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    'cmd' is the command name, only used in the error message when the
    arguments are invalid.  Exactly one of opts['changelog'],
    opts['manifest'], opts['dir'] or a filename selects the revlog;
    conflicting combinations raise error.Abort.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    # validate option combinations; 'msg' holds the first problem found
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # use the unfiltered view's changelog
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            # only use the dirlog if it has revisions (len > 0)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        # no (non-empty) revlog selected above: fall back to opening the
        # named file directly as a revlog index
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
1027
1027
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, rename) files per 'pats'.

    'pats' is a list of patterns whose last element is the destination.
    Returns True if any per-file copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    # maps absolute target path -> absolute source path, for collision checks
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about and skipping unmanaged/removed files
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/rename; returns True only on failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                # case-only rename: treat as a same-file rename below
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename through a temp name so case-only renames work
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist at dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    # the last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            # copyfile returns a truthy value only on failure
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1273
1273
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1294
1294
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (summary message, committed node or None, rejects flag) tuple.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    # command-line options take precedence over metadata found in the patch
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    # no temporary file means patch.extract() found no diff to apply
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        # normalize to two parents so parents[1] below is always valid
        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # recorded parents unknown locally; fall back to the
                # working directory parents
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # normal mode: apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                # with --partial, hunks that fail to apply become .rej
                # files and we still commit what did apply
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    # --exact must reproduce the original node, so no editor
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a fully-rejected partial import still records a commit
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass mode: build the commit in memory without touching
            # the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always clean up the temporary patch file written by patch.extract()
        os.unlink(tmpname)
1465
1465
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header, or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1473
1473
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" through *write*.

    Writes the header lines (user, date, branch, node, parents, any
    extension-provided headers), the commit description, and finally the
    diff of *ctx* against its selected parent.

    :switch_parent: diff against the second parent instead of the first
    :seqno: 1-based position in the exported series (passed to extensions)
    :write: callable accepting (text, **opts); opts may carry a ui label
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    # diff base: first (possibly switched) parent, or null for a root
    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    # human-readable echo of the machine-readable Date line above
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # let extensions contribute extra "# ..." header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1506
1506
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                            the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    # widest revision number, used to pad %r in fntemplate
    revwidth = max(len(str(rev)) for rev in revs)
    # per-filename mode cache shared across makefileobj calls
    filemode = {}

    # 'write' is rebound below depending on the chosen destination
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            # closure over the per-revision file object; safe because it is
            # only called before fo is closed below
            def write(s, **kw):
                fo.write(s)
        # '<unnamed>'/'<fdopen>'-style pseudo names are not worth reporting
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1565
1565
def showmarker(fm, marker, index=None):
    """Dump a single obsolescence marker through formatter *fm*.

    Intended for debug commands; *index* is an optional position prefix.
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    successors = marker.succnodes()
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentnodes), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so drop it from the metadata dump
    remaining = marker.metadata().copy()
    remaining.pop('date', None)
    stringified = util.rapply(pycompat.maybebytestr, remaining)
    fm.write('metadata', '{%s}',
             fm.formatdict(stringified, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1587
1587
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    # rev -> date tuple, filled in by the prepare callback below
    found = {}

    def prep(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n")
                      % (rev, util.datestr(found[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1608
1608
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until *sizelimit* is reached.

    Produces windowsize, 2*windowsize, ... and then repeats the first
    value >= sizelimit indefinitely.
    """
    size = windowsize
    # doubling phase: grow until the cap is hit
    while size < sizelimit:
        yield size
        size *= 2
    # steady state: repeat the capped size forever
    while True:
        yield size
1614
1614
def _walkrevs(repo, opts):
    """Resolve the revision set 'hg log'-style commands should walk.

    Default --rev value depends on --follow but --follow behavior
    depends on revisions resolved from --rev...
    """
    following = opts.get('follow') or opts.get('follow_first')
    revspec = opts.get('rev')
    if revspec:
        # explicit --rev wins over any --follow default
        revs = scmutil.revrange(repo, revspec)
    elif following and repo.dirstate.p1() == nullid:
        # following from an unborn working directory: nothing to walk
        revs = smartset.baseset()
    elif following:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
1629
1629
class FileWalkError(Exception):
    # Raised by walkfilerevs() when the file history cannot be walked
    # using filelogs alone, so callers must fall back to the slow path.
    pass
1632
1632
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Also fills *fncache* (rev -> list of filenames) for the revisions it
    visits, as a side effect.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # (filename, filenode) pairs discovered via renames while following
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None); rename sources found while
        # walking are appended to 'copies' and picked up here afterwards
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1729
1729
class _followfilter(object):
    """Stateful predicate selecting revisions related to the first one seen.

    The first rev passed to match() becomes the start point; later revs
    match if they are descendants (when walking forward, rev > startrev)
    or ancestors (when walking backward) of it. Revisions must be fed in
    a monotonic order on each side for the root-set bookkeeping to hold.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # startrev is set lazily by the first match() call
        self.startrev = nullrev
        # frontier of known-related revisions
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        # Return True if *rev* is related to the start revision.
        def realparents(rev):
            # parents of rev, restricted to p1 when onlyfirst is set,
            # and with null parents filtered out otherwise
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace rev by its parents in the frontier so earlier
                # ancestors keep matching
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1767
1767
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    # fncache maps rev -> list of matched filenames changed in that rev;
    # filled opportunistically so iterate() can avoid re-filtering.
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # Each candidate rev is examined at most once: drop it
                    # from self.revs so a repeated query takes a fast path.
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): relies on the smartset returned by _walkrevs
                # supporting subtraction of a plain list -- confirm.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # Gather one window of wanted revs, then prepare them in
            # ascending order before yielding in the original order.
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # fncache miss: lazily filter this rev's changed files.
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1904
1904
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Add files matched by ``match`` to the dirstate (``hg add``).

    Returns the list of file names that could not be added. ``prefix``
    is joined onto subrepo paths when reporting; ``explicitonly``
    restricts additions to files explicitly named by the matcher.
    Honors the ``subrepos`` and ``dry_run`` opts.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # Record files the matcher reports as bad, then delegate to the
    # matcher's own bad-file handler.
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # Audit filenames for case collisions on case-insensitive systems.
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # Recurse into subrepositories; explicitonly for a subrepo is False
    # only when the caller asked for --subrepos.
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        # Only report rejects the user explicitly asked for.
        bad.extend(f for f in rejected if f in match.files())
    return bad
1947
1947
def addwebdirpath(repo, serverpath, webconf):
    """Publish ``repo`` (and any subrepos seen in its history) in ``webconf``.

    Maps ``serverpath`` to the repository root, then recurses into every
    subrepository recorded by any revision that touched ``.hgsub``.
    """
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # Walk all revisions where .hgsub changed so historical subrepos are
    # published as well, not just the ones present at tip.
    for rev in repo.revs('filelog("path:.hgsub")'):
        revctx = repo[rev]
        for subpath in revctx.substate:
            revctx.sub(subpath).addwebdirpath(serverpath, webconf)
1956
1956
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking matched files without deleting them (``hg forget``).

    Returns a pair ``(bad, forgot)``: files that could not be forgotten
    and files that were. Recurses into subrepositories, prefixing their
    file names with the subrepo path. With ``explicitonly``, only files
    explicitly named by the matcher are forgotten.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # Warn about explicitly requested files that are not tracked.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2004
2004
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the names of files in ``ctx`` matched by ``m`` to formatter ``fm``.

    ``fmt`` is the format string for the path field. Returns 0 if at
    least one file (here or in a subrepo) was listed, 1 otherwise.
    When listing the working copy (rev is None), files marked removed
    ('r') in the dirstate are skipped.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    # Recurse into subrepos that were named exactly, match some files,
    # or when --subrepos was requested.
    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2034
2034
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Implement the file-removal logic behind ``hg remove``.

    Returns 0 on success, 1 if anything could not be removed. ``after``
    only records deletions already made in the working copy; ``force``
    removes even modified/added files; ``subrepos`` recurses into
    subrepositories. ``warnings`` is a recursion detail: when a list is
    passed, warnings are accumulated into it and printed by the caller.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # True if f lives inside one of the working copy's subrepos.
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    # NOTE(review): 'list' shadows the builtin; kept for fidelity.
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
                ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2153
2153
2154 def _updatecatformatter(fm, ctx, matcher, path, decode):
2154 def _updatecatformatter(fm, ctx, matcher, path, decode):
2155 """Hook for adding data to the formatter used by ``hg cat``.
2155 """Hook for adding data to the formatter used by ``hg cat``.
2156
2156
2157 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2157 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2158 this method first."""
2158 this method first."""
2159 data = ctx[path].data()
2159 data = ctx[path].data()
2160 if decode:
2160 if decode:
2161 data = ctx.repo().wwritedata(path, data)
2161 data = ctx.repo().wwritedata(path, data)
2162 fm.startitem()
2162 fm.startitem()
2163 fm.write('data', '%s', data)
2163 fm.write('data', '%s', data)
2164 fm.data(abspath=path, path=matcher.rel(path))
2164 fm.data(abspath=path, path=matcher.rel(path))
2165
2165
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of files in ``ctx`` matched by ``matcher``.

    Output goes through ``basefm``, or through per-file formatters when
    ``fntemplate`` is given (the template names an output file per
    path). Returns 0 if at least one file was written, 1 otherwise.
    Recurses into subrepositories.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                _prefetchfiles(repo, ctx, [file])
                write(file)
                return 0
        except KeyError:
            # Fall through to the generic walk below.
            pass

    files = [f for f in ctx.walk(matcher)]
    _prefetchfiles(repo, ctx, files)

    for abs in files:
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2218
2218
def commit(ui, repo, commitfunc, pats, opts):
    """Commit the specified files or all outstanding changes.

    Parses the date opt, builds the log message and matcher, optionally
    runs addremove under a dirstate guard, then delegates the actual
    commit to ``commitfunc`` and returns its result.
    """
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # Extract addremove carefully -- this function can be called from a
    # command that doesn't support addremove at all.
    dsguard = None
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard and scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))
        return commitfunc(ui, repo, message, matcher, opts)
2239
2239
def samefile(f, ctx1, ctx2):
    """Report whether file ``f`` is identical in ``ctx1`` and ``ctx2``.

    A file missing from both manifests counts as "same"; one present in
    only one of the two contexts is "different". When present in both,
    contents and flags must both match.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # Absent from both -> same; present only in ctx2 -> different.
        return not in2
    if not in2:
        return False
    fa = ctx1.filectx(f)
    fb = ctx2.filectx(f)
    return not fa.cmp(fb) and fa.flags() == fb.flags()
2251
2251
2252 def amend(ui, repo, old, extra, pats, opts):
2252 def amend(ui, repo, old, extra, pats, opts):
2253 # avoid cycle context -> subrepo -> cmdutil
2253 # avoid cycle context -> subrepo -> cmdutil
2254 from . import context
2254 from . import context
2255
2255
2256 # amend will reuse the existing user if not specified, but the obsolete
2256 # amend will reuse the existing user if not specified, but the obsolete
2257 # marker creation requires that the current user's name is specified.
2257 # marker creation requires that the current user's name is specified.
2258 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2258 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2259 ui.username() # raise exception if username not set
2259 ui.username() # raise exception if username not set
2260
2260
2261 ui.note(_('amending changeset %s\n') % old)
2261 ui.note(_('amending changeset %s\n') % old)
2262 base = old.p1()
2262 base = old.p1()
2263
2263
2264 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2264 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2265 # Participating changesets:
2265 # Participating changesets:
2266 #
2266 #
2267 # wctx o - workingctx that contains changes from working copy
2267 # wctx o - workingctx that contains changes from working copy
2268 # | to go into amending commit
2268 # | to go into amending commit
2269 # |
2269 # |
2270 # old o - changeset to amend
2270 # old o - changeset to amend
2271 # |
2271 # |
2272 # base o - first parent of the changeset to amend
2272 # base o - first parent of the changeset to amend
2273 wctx = repo[None]
2273 wctx = repo[None]
2274
2274
2275 # Copy to avoid mutating input
2275 # Copy to avoid mutating input
2276 extra = extra.copy()
2276 extra = extra.copy()
2277 # Update extra dict from amended commit (e.g. to preserve graft
2277 # Update extra dict from amended commit (e.g. to preserve graft
2278 # source)
2278 # source)
2279 extra.update(old.extra())
2279 extra.update(old.extra())
2280
2280
2281 # Also update it from the from the wctx
2281 # Also update it from the from the wctx
2282 extra.update(wctx.extra())
2282 extra.update(wctx.extra())
2283
2283
2284 user = opts.get('user') or old.user()
2284 user = opts.get('user') or old.user()
2285 date = opts.get('date') or old.date()
2285 date = opts.get('date') or old.date()
2286
2286
2287 # Parse the date to allow comparison between date and old.date()
2287 # Parse the date to allow comparison between date and old.date()
2288 date = util.parsedate(date)
2288 date = util.parsedate(date)
2289
2289
2290 if len(old.parents()) > 1:
2290 if len(old.parents()) > 1:
2291 # ctx.files() isn't reliable for merges, so fall back to the
2291 # ctx.files() isn't reliable for merges, so fall back to the
2292 # slower repo.status() method
2292 # slower repo.status() method
2293 files = set([fn for st in repo.status(base, old)[:3]
2293 files = set([fn for st in repo.status(base, old)[:3]
2294 for fn in st])
2294 for fn in st])
2295 else:
2295 else:
2296 files = set(old.files())
2296 files = set(old.files())
2297
2297
2298 # add/remove the files to the working copy if the "addremove" option
2298 # add/remove the files to the working copy if the "addremove" option
2299 # was specified.
2299 # was specified.
2300 matcher = scmutil.match(wctx, pats, opts)
2300 matcher = scmutil.match(wctx, pats, opts)
2301 if (opts.get('addremove')
2301 if (opts.get('addremove')
2302 and scmutil.addremove(repo, matcher, "", opts)):
2302 and scmutil.addremove(repo, matcher, "", opts)):
2303 raise error.Abort(
2303 raise error.Abort(
2304 _("failed to mark all new/missing files as added/removed"))
2304 _("failed to mark all new/missing files as added/removed"))
2305
2305
2306 # Check subrepos. This depends on in-place wctx._status update in
2306 # Check subrepos. This depends on in-place wctx._status update in
2307 # subrepo.precommit(). To minimize the risk of this hack, we do
2307 # subrepo.precommit(). To minimize the risk of this hack, we do
2308 # nothing if .hgsub does not exist.
2308 # nothing if .hgsub does not exist.
2309 if '.hgsub' in wctx or '.hgsub' in old:
2309 if '.hgsub' in wctx or '.hgsub' in old:
2310 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
2310 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
2311 subs, commitsubs, newsubstate = subrepo.precommit(
2311 subs, commitsubs, newsubstate = subrepo.precommit(
2312 ui, wctx, wctx._status, matcher)
2312 ui, wctx, wctx._status, matcher)
2313 # amend should abort if commitsubrepos is enabled
2313 # amend should abort if commitsubrepos is enabled
2314 assert not commitsubs
2314 assert not commitsubs
2315 if subs:
2315 if subs:
2316 subrepo.writestate(repo, newsubstate)
2316 subrepo.writestate(repo, newsubstate)
2317
2317
2318 filestoamend = set(f for f in wctx.files() if matcher(f))
2318 filestoamend = set(f for f in wctx.files() if matcher(f))
2319
2319
2320 changes = (len(filestoamend) > 0)
2320 changes = (len(filestoamend) > 0)
2321 if changes:
2321 if changes:
2322 # Recompute copies (avoid recording a -> b -> a)
2322 # Recompute copies (avoid recording a -> b -> a)
2323 copied = copies.pathcopies(base, wctx, matcher)
2323 copied = copies.pathcopies(base, wctx, matcher)
2324 if old.p2:
2324 if old.p2:
2325 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2325 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2326
2326
2327 # Prune files which were reverted by the updates: if old
2327 # Prune files which were reverted by the updates: if old
2328 # introduced file X and the file was renamed in the working
2328 # introduced file X and the file was renamed in the working
2329 # copy, then those two files are the same and
2329 # copy, then those two files are the same and
2330 # we can discard X from our list of files. Likewise if X
2330 # we can discard X from our list of files. Likewise if X
2331 # was removed, it's no longer relevant. If X is missing (aka
2331 # was removed, it's no longer relevant. If X is missing (aka
2332 # deleted), old X must be preserved.
2332 # deleted), old X must be preserved.
2333 files.update(filestoamend)
2333 files.update(filestoamend)
2334 files = [f for f in files if (not samefile(f, wctx, base)
2334 files = [f for f in files if (not samefile(f, wctx, base)
2335 or f in wctx.deleted())]
2335 or f in wctx.deleted())]
2336
2336
2337 def filectxfn(repo, ctx_, path):
2337 def filectxfn(repo, ctx_, path):
2338 try:
2338 try:
2339 # If the file being considered is not amongst the files
2339 # If the file being considered is not amongst the files
2340 # to be amended, we should return the file context from the
2340 # to be amended, we should return the file context from the
2341 # old changeset. This avoids issues when only some files in
2341 # old changeset. This avoids issues when only some files in
2342 # the working copy are being amended but there are also
2342 # the working copy are being amended but there are also
2343 # changes to other files from the old changeset.
2343 # changes to other files from the old changeset.
2344 if path not in filestoamend:
2344 if path not in filestoamend:
2345 return old.filectx(path)
2345 return old.filectx(path)
2346
2346
2347 # Return None for removed files.
2347 # Return None for removed files.
2348 if path in wctx.removed():
2348 if path in wctx.removed():
2349 return None
2349 return None
2350
2350
2351 fctx = wctx[path]
2351 fctx = wctx[path]
2352 flags = fctx.flags()
2352 flags = fctx.flags()
2353 mctx = context.memfilectx(repo, ctx_,
2353 mctx = context.memfilectx(repo, ctx_,
2354 fctx.path(), fctx.data(),
2354 fctx.path(), fctx.data(),
2355 islink='l' in flags,
2355 islink='l' in flags,
2356 isexec='x' in flags,
2356 isexec='x' in flags,
2357 copied=copied.get(path))
2357 copied=copied.get(path))
2358 return mctx
2358 return mctx
2359 except KeyError:
2359 except KeyError:
2360 return None
2360 return None
2361 else:
2361 else:
2362 ui.note(_('copying changeset %s to %s\n') % (old, base))
2362 ui.note(_('copying changeset %s to %s\n') % (old, base))
2363
2363
2364 # Use version of files as in the old cset
2364 # Use version of files as in the old cset
2365 def filectxfn(repo, ctx_, path):
2365 def filectxfn(repo, ctx_, path):
2366 try:
2366 try:
2367 return old.filectx(path)
2367 return old.filectx(path)
2368 except KeyError:
2368 except KeyError:
2369 return None
2369 return None
2370
2370
2371 # See if we got a message from -m or -l, if not, open the editor with
2371 # See if we got a message from -m or -l, if not, open the editor with
2372 # the message of the changeset to amend.
2372 # the message of the changeset to amend.
2373 message = logmessage(ui, opts)
2373 message = logmessage(ui, opts)
2374
2374
2375 editform = mergeeditform(old, 'commit.amend')
2375 editform = mergeeditform(old, 'commit.amend')
2376 editor = getcommiteditor(editform=editform,
2376 editor = getcommiteditor(editform=editform,
2377 **pycompat.strkwargs(opts))
2377 **pycompat.strkwargs(opts))
2378
2378
2379 if not message:
2379 if not message:
2380 editor = getcommiteditor(edit=True, editform=editform)
2380 editor = getcommiteditor(edit=True, editform=editform)
2381 message = old.description()
2381 message = old.description()
2382
2382
2383 pureextra = extra.copy()
2383 pureextra = extra.copy()
2384 extra['amend_source'] = old.hex()
2384 extra['amend_source'] = old.hex()
2385
2385
2386 new = context.memctx(repo,
2386 new = context.memctx(repo,
2387 parents=[base.node(), old.p2().node()],
2387 parents=[base.node(), old.p2().node()],
2388 text=message,
2388 text=message,
2389 files=files,
2389 files=files,
2390 filectxfn=filectxfn,
2390 filectxfn=filectxfn,
2391 user=user,
2391 user=user,
2392 date=date,
2392 date=date,
2393 extra=extra,
2393 extra=extra,
2394 editor=editor)
2394 editor=editor)
2395
2395
2396 newdesc = changelog.stripdesc(new.description())
2396 newdesc = changelog.stripdesc(new.description())
2397 if ((not changes)
2397 if ((not changes)
2398 and newdesc == old.description()
2398 and newdesc == old.description()
2399 and user == old.user()
2399 and user == old.user()
2400 and date == old.date()
2400 and date == old.date()
2401 and pureextra == old.extra()):
2401 and pureextra == old.extra()):
2402 # nothing changed. continuing here would create a new node
2402 # nothing changed. continuing here would create a new node
2403 # anyway because of the amend_source noise.
2403 # anyway because of the amend_source noise.
2404 #
2404 #
2405 # This not what we expect from amend.
2405 # This not what we expect from amend.
2406 return old.node()
2406 return old.node()
2407
2407
2408 if opts.get('secret'):
2408 if opts.get('secret'):
2409 commitphase = 'secret'
2409 commitphase = 'secret'
2410 else:
2410 else:
2411 commitphase = old.phase()
2411 commitphase = old.phase()
2412 overrides = {('phases', 'new-commit'): commitphase}
2412 overrides = {('phases', 'new-commit'): commitphase}
2413 with ui.configoverride(overrides, 'amend'):
2413 with ui.configoverride(overrides, 'amend'):
2414 newid = repo.commitctx(new)
2414 newid = repo.commitctx(new)
2415
2415
2416 # Reroute the working copy parent to the new changeset
2416 # Reroute the working copy parent to the new changeset
2417 repo.setparents(newid, nullid)
2417 repo.setparents(newid, nullid)
2418 mapping = {old.node(): (newid,)}
2418 mapping = {old.node(): (newid,)}
2419 obsmetadata = None
2419 obsmetadata = None
2420 if opts.get('note'):
2420 if opts.get('note'):
2421 obsmetadata = {'note': opts['note']}
2421 obsmetadata = {'note': opts['note']}
2422 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
2422 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
2423
2423
2424 # Fixing the dirstate because localrepo.commitctx does not update
2424 # Fixing the dirstate because localrepo.commitctx does not update
2425 # it. This is rather convenient because we did not need to update
2425 # it. This is rather convenient because we did not need to update
2426 # the dirstate for all the files in the new commit which commitctx
2426 # the dirstate for all the files in the new commit which commitctx
2427 # could have done if it updated the dirstate. Now, we can
2427 # could have done if it updated the dirstate. Now, we can
2428 # selectively update the dirstate only for the amended files.
2428 # selectively update the dirstate only for the amended files.
2429 dirstate = repo.dirstate
2429 dirstate = repo.dirstate
2430
2430
2431 # Update the state of the files which were added and
2431 # Update the state of the files which were added and
2432 # and modified in the amend to "normal" in the dirstate.
2432 # and modified in the amend to "normal" in the dirstate.
2433 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2433 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2434 for f in normalfiles:
2434 for f in normalfiles:
2435 dirstate.normal(f)
2435 dirstate.normal(f)
2436
2436
2437 # Update the state of files which were removed in the amend
2437 # Update the state of files which were removed in the amend
2438 # to "removed" in the dirstate.
2438 # to "removed" in the dirstate.
2439 removedfiles = set(wctx.removed()) & filestoamend
2439 removedfiles = set(wctx.removed()) & filestoamend
2440 for f in removedfiles:
2440 for f in removedfiles:
2441 dirstate.drop(f)
2441 dirstate.drop(f)
2442
2442
2443 return newid
2443 return newid
2444
2444
2445 def commiteditor(repo, ctx, subs, editform=''):
2445 def commiteditor(repo, ctx, subs, editform=''):
2446 if ctx.description():
2446 if ctx.description():
2447 return ctx.description()
2447 return ctx.description()
2448 return commitforceeditor(repo, ctx, subs, editform=editform,
2448 return commitforceeditor(repo, ctx, subs, editform=editform,
2449 unchangedmessagedetection=True)
2449 unchangedmessagedetection=True)
2450
2450
2451 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2451 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2452 editform='', unchangedmessagedetection=False):
2452 editform='', unchangedmessagedetection=False):
2453 if not extramsg:
2453 if not extramsg:
2454 extramsg = _("Leave message empty to abort commit.")
2454 extramsg = _("Leave message empty to abort commit.")
2455
2455
2456 forms = [e for e in editform.split('.') if e]
2456 forms = [e for e in editform.split('.') if e]
2457 forms.insert(0, 'changeset')
2457 forms.insert(0, 'changeset')
2458 templatetext = None
2458 templatetext = None
2459 while forms:
2459 while forms:
2460 ref = '.'.join(forms)
2460 ref = '.'.join(forms)
2461 if repo.ui.config('committemplate', ref):
2461 if repo.ui.config('committemplate', ref):
2462 templatetext = committext = buildcommittemplate(
2462 templatetext = committext = buildcommittemplate(
2463 repo, ctx, subs, extramsg, ref)
2463 repo, ctx, subs, extramsg, ref)
2464 break
2464 break
2465 forms.pop()
2465 forms.pop()
2466 else:
2466 else:
2467 committext = buildcommittext(repo, ctx, subs, extramsg)
2467 committext = buildcommittext(repo, ctx, subs, extramsg)
2468
2468
2469 # run editor in the repository root
2469 # run editor in the repository root
2470 olddir = pycompat.getcwd()
2470 olddir = pycompat.getcwd()
2471 os.chdir(repo.root)
2471 os.chdir(repo.root)
2472
2472
2473 # make in-memory changes visible to external process
2473 # make in-memory changes visible to external process
2474 tr = repo.currenttransaction()
2474 tr = repo.currenttransaction()
2475 repo.dirstate.write(tr)
2475 repo.dirstate.write(tr)
2476 pending = tr and tr.writepending() and repo.root
2476 pending = tr and tr.writepending() and repo.root
2477
2477
2478 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2478 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2479 editform=editform, pending=pending,
2479 editform=editform, pending=pending,
2480 repopath=repo.path, action='commit')
2480 repopath=repo.path, action='commit')
2481 text = editortext
2481 text = editortext
2482
2482
2483 # strip away anything below this special string (used for editors that want
2483 # strip away anything below this special string (used for editors that want
2484 # to display the diff)
2484 # to display the diff)
2485 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2485 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2486 if stripbelow:
2486 if stripbelow:
2487 text = text[:stripbelow.start()]
2487 text = text[:stripbelow.start()]
2488
2488
2489 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2489 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2490 os.chdir(olddir)
2490 os.chdir(olddir)
2491
2491
2492 if finishdesc:
2492 if finishdesc:
2493 text = finishdesc(text)
2493 text = finishdesc(text)
2494 if not text.strip():
2494 if not text.strip():
2495 raise error.Abort(_("empty commit message"))
2495 raise error.Abort(_("empty commit message"))
2496 if unchangedmessagedetection and editortext == templatetext:
2496 if unchangedmessagedetection and editortext == templatetext:
2497 raise error.Abort(_("commit message unchanged"))
2497 raise error.Abort(_("commit message unchanged"))
2498
2498
2499 return text
2499 return text
2500
2500
2501 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2501 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2502 ui = repo.ui
2502 ui = repo.ui
2503 spec = formatter.templatespec(ref, None, None)
2503 spec = formatter.templatespec(ref, None, None)
2504 t = logcmdutil.changesettemplater(ui, repo, spec, None, {}, False)
2504 t = logcmdutil.changesettemplater(ui, repo, spec)
2505 t.t.cache.update((k, templater.unquotestring(v))
2505 t.t.cache.update((k, templater.unquotestring(v))
2506 for k, v in repo.ui.configitems('committemplate'))
2506 for k, v in repo.ui.configitems('committemplate'))
2507
2507
2508 if not extramsg:
2508 if not extramsg:
2509 extramsg = '' # ensure that extramsg is string
2509 extramsg = '' # ensure that extramsg is string
2510
2510
2511 ui.pushbuffer()
2511 ui.pushbuffer()
2512 t.show(ctx, extramsg=extramsg)
2512 t.show(ctx, extramsg=extramsg)
2513 return ui.popbuffer()
2513 return ui.popbuffer()
2514
2514
2515 def hgprefix(msg):
2515 def hgprefix(msg):
2516 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2516 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2517
2517
2518 def buildcommittext(repo, ctx, subs, extramsg):
2518 def buildcommittext(repo, ctx, subs, extramsg):
2519 edittext = []
2519 edittext = []
2520 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2520 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2521 if ctx.description():
2521 if ctx.description():
2522 edittext.append(ctx.description())
2522 edittext.append(ctx.description())
2523 edittext.append("")
2523 edittext.append("")
2524 edittext.append("") # Empty line between message and comments.
2524 edittext.append("") # Empty line between message and comments.
2525 edittext.append(hgprefix(_("Enter commit message."
2525 edittext.append(hgprefix(_("Enter commit message."
2526 " Lines beginning with 'HG:' are removed.")))
2526 " Lines beginning with 'HG:' are removed.")))
2527 edittext.append(hgprefix(extramsg))
2527 edittext.append(hgprefix(extramsg))
2528 edittext.append("HG: --")
2528 edittext.append("HG: --")
2529 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2529 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2530 if ctx.p2():
2530 if ctx.p2():
2531 edittext.append(hgprefix(_("branch merge")))
2531 edittext.append(hgprefix(_("branch merge")))
2532 if ctx.branch():
2532 if ctx.branch():
2533 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2533 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2534 if bookmarks.isactivewdirparent(repo):
2534 if bookmarks.isactivewdirparent(repo):
2535 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2535 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2536 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2536 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2537 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2537 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2538 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2538 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2539 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2539 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2540 if not added and not modified and not removed:
2540 if not added and not modified and not removed:
2541 edittext.append(hgprefix(_("no files changed")))
2541 edittext.append(hgprefix(_("no files changed")))
2542 edittext.append("")
2542 edittext.append("")
2543
2543
2544 return "\n".join(edittext)
2544 return "\n".join(edittext)
2545
2545
2546 def commitstatus(repo, node, branch, bheads=None, opts=None):
2546 def commitstatus(repo, node, branch, bheads=None, opts=None):
2547 if opts is None:
2547 if opts is None:
2548 opts = {}
2548 opts = {}
2549 ctx = repo[node]
2549 ctx = repo[node]
2550 parents = ctx.parents()
2550 parents = ctx.parents()
2551
2551
2552 if (not opts.get('amend') and bheads and node not in bheads and not
2552 if (not opts.get('amend') and bheads and node not in bheads and not
2553 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2553 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2554 repo.ui.status(_('created new head\n'))
2554 repo.ui.status(_('created new head\n'))
2555 # The message is not printed for initial roots. For the other
2555 # The message is not printed for initial roots. For the other
2556 # changesets, it is printed in the following situations:
2556 # changesets, it is printed in the following situations:
2557 #
2557 #
2558 # Par column: for the 2 parents with ...
2558 # Par column: for the 2 parents with ...
2559 # N: null or no parent
2559 # N: null or no parent
2560 # B: parent is on another named branch
2560 # B: parent is on another named branch
2561 # C: parent is a regular non head changeset
2561 # C: parent is a regular non head changeset
2562 # H: parent was a branch head of the current branch
2562 # H: parent was a branch head of the current branch
2563 # Msg column: whether we print "created new head" message
2563 # Msg column: whether we print "created new head" message
2564 # In the following, it is assumed that there already exists some
2564 # In the following, it is assumed that there already exists some
2565 # initial branch heads of the current branch, otherwise nothing is
2565 # initial branch heads of the current branch, otherwise nothing is
2566 # printed anyway.
2566 # printed anyway.
2567 #
2567 #
2568 # Par Msg Comment
2568 # Par Msg Comment
2569 # N N y additional topo root
2569 # N N y additional topo root
2570 #
2570 #
2571 # B N y additional branch root
2571 # B N y additional branch root
2572 # C N y additional topo head
2572 # C N y additional topo head
2573 # H N n usual case
2573 # H N n usual case
2574 #
2574 #
2575 # B B y weird additional branch root
2575 # B B y weird additional branch root
2576 # C B y branch merge
2576 # C B y branch merge
2577 # H B n merge with named branch
2577 # H B n merge with named branch
2578 #
2578 #
2579 # C C y additional head from merge
2579 # C C y additional head from merge
2580 # C H n merge with a head
2580 # C H n merge with a head
2581 #
2581 #
2582 # H H n head merge: head count decreases
2582 # H H n head merge: head count decreases
2583
2583
2584 if not opts.get('close_branch'):
2584 if not opts.get('close_branch'):
2585 for r in parents:
2585 for r in parents:
2586 if r.closesbranch() and r.branch() == branch:
2586 if r.closesbranch() and r.branch() == branch:
2587 repo.ui.status(_('reopening closed branch head %d\n') % r)
2587 repo.ui.status(_('reopening closed branch head %d\n') % r)
2588
2588
2589 if repo.ui.debugflag:
2589 if repo.ui.debugflag:
2590 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2590 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2591 elif repo.ui.verbose:
2591 elif repo.ui.verbose:
2592 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2592 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2593
2593
2594 def postcommitstatus(repo, pats, opts):
2594 def postcommitstatus(repo, pats, opts):
2595 return repo.status(match=scmutil.match(repo[None], pats, opts))
2595 return repo.status(match=scmutil.match(repo[None], pats, opts))
2596
2596
2597 def revert(ui, repo, ctx, parents, *pats, **opts):
2597 def revert(ui, repo, ctx, parents, *pats, **opts):
2598 opts = pycompat.byteskwargs(opts)
2598 opts = pycompat.byteskwargs(opts)
2599 parent, p2 = parents
2599 parent, p2 = parents
2600 node = ctx.node()
2600 node = ctx.node()
2601
2601
2602 mf = ctx.manifest()
2602 mf = ctx.manifest()
2603 if node == p2:
2603 if node == p2:
2604 parent = p2
2604 parent = p2
2605
2605
2606 # need all matching names in dirstate and manifest of target rev,
2606 # need all matching names in dirstate and manifest of target rev,
2607 # so have to walk both. do not print errors if files exist in one
2607 # so have to walk both. do not print errors if files exist in one
2608 # but not other. in both cases, filesets should be evaluated against
2608 # but not other. in both cases, filesets should be evaluated against
2609 # workingctx to get consistent result (issue4497). this means 'set:**'
2609 # workingctx to get consistent result (issue4497). this means 'set:**'
2610 # cannot be used to select missing files from target rev.
2610 # cannot be used to select missing files from target rev.
2611
2611
2612 # `names` is a mapping for all elements in working copy and target revision
2612 # `names` is a mapping for all elements in working copy and target revision
2613 # The mapping is in the form:
2613 # The mapping is in the form:
2614 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2614 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2615 names = {}
2615 names = {}
2616
2616
2617 with repo.wlock():
2617 with repo.wlock():
2618 ## filling of the `names` mapping
2618 ## filling of the `names` mapping
2619 # walk dirstate to fill `names`
2619 # walk dirstate to fill `names`
2620
2620
2621 interactive = opts.get('interactive', False)
2621 interactive = opts.get('interactive', False)
2622 wctx = repo[None]
2622 wctx = repo[None]
2623 m = scmutil.match(wctx, pats, opts)
2623 m = scmutil.match(wctx, pats, opts)
2624
2624
2625 # we'll need this later
2625 # we'll need this later
2626 targetsubs = sorted(s for s in wctx.substate if m(s))
2626 targetsubs = sorted(s for s in wctx.substate if m(s))
2627
2627
2628 if not m.always():
2628 if not m.always():
2629 matcher = matchmod.badmatch(m, lambda x, y: False)
2629 matcher = matchmod.badmatch(m, lambda x, y: False)
2630 for abs in wctx.walk(matcher):
2630 for abs in wctx.walk(matcher):
2631 names[abs] = m.rel(abs), m.exact(abs)
2631 names[abs] = m.rel(abs), m.exact(abs)
2632
2632
2633 # walk target manifest to fill `names`
2633 # walk target manifest to fill `names`
2634
2634
2635 def badfn(path, msg):
2635 def badfn(path, msg):
2636 if path in names:
2636 if path in names:
2637 return
2637 return
2638 if path in ctx.substate:
2638 if path in ctx.substate:
2639 return
2639 return
2640 path_ = path + '/'
2640 path_ = path + '/'
2641 for f in names:
2641 for f in names:
2642 if f.startswith(path_):
2642 if f.startswith(path_):
2643 return
2643 return
2644 ui.warn("%s: %s\n" % (m.rel(path), msg))
2644 ui.warn("%s: %s\n" % (m.rel(path), msg))
2645
2645
2646 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2646 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2647 if abs not in names:
2647 if abs not in names:
2648 names[abs] = m.rel(abs), m.exact(abs)
2648 names[abs] = m.rel(abs), m.exact(abs)
2649
2649
2650 # Find status of all file in `names`.
2650 # Find status of all file in `names`.
2651 m = scmutil.matchfiles(repo, names)
2651 m = scmutil.matchfiles(repo, names)
2652
2652
2653 changes = repo.status(node1=node, match=m,
2653 changes = repo.status(node1=node, match=m,
2654 unknown=True, ignored=True, clean=True)
2654 unknown=True, ignored=True, clean=True)
2655 else:
2655 else:
2656 changes = repo.status(node1=node, match=m)
2656 changes = repo.status(node1=node, match=m)
2657 for kind in changes:
2657 for kind in changes:
2658 for abs in kind:
2658 for abs in kind:
2659 names[abs] = m.rel(abs), m.exact(abs)
2659 names[abs] = m.rel(abs), m.exact(abs)
2660
2660
2661 m = scmutil.matchfiles(repo, names)
2661 m = scmutil.matchfiles(repo, names)
2662
2662
2663 modified = set(changes.modified)
2663 modified = set(changes.modified)
2664 added = set(changes.added)
2664 added = set(changes.added)
2665 removed = set(changes.removed)
2665 removed = set(changes.removed)
2666 _deleted = set(changes.deleted)
2666 _deleted = set(changes.deleted)
2667 unknown = set(changes.unknown)
2667 unknown = set(changes.unknown)
2668 unknown.update(changes.ignored)
2668 unknown.update(changes.ignored)
2669 clean = set(changes.clean)
2669 clean = set(changes.clean)
2670 modadded = set()
2670 modadded = set()
2671
2671
2672 # We need to account for the state of the file in the dirstate,
2672 # We need to account for the state of the file in the dirstate,
2673 # even when we revert against something else than parent. This will
2673 # even when we revert against something else than parent. This will
2674 # slightly alter the behavior of revert (doing back up or not, delete
2674 # slightly alter the behavior of revert (doing back up or not, delete
2675 # or just forget etc).
2675 # or just forget etc).
2676 if parent == node:
2676 if parent == node:
2677 dsmodified = modified
2677 dsmodified = modified
2678 dsadded = added
2678 dsadded = added
2679 dsremoved = removed
2679 dsremoved = removed
2680 # store all local modifications, useful later for rename detection
2680 # store all local modifications, useful later for rename detection
2681 localchanges = dsmodified | dsadded
2681 localchanges = dsmodified | dsadded
2682 modified, added, removed = set(), set(), set()
2682 modified, added, removed = set(), set(), set()
2683 else:
2683 else:
2684 changes = repo.status(node1=parent, match=m)
2684 changes = repo.status(node1=parent, match=m)
2685 dsmodified = set(changes.modified)
2685 dsmodified = set(changes.modified)
2686 dsadded = set(changes.added)
2686 dsadded = set(changes.added)
2687 dsremoved = set(changes.removed)
2687 dsremoved = set(changes.removed)
2688 # store all local modifications, useful later for rename detection
2688 # store all local modifications, useful later for rename detection
2689 localchanges = dsmodified | dsadded
2689 localchanges = dsmodified | dsadded
2690
2690
2691 # only take into account for removes between wc and target
2691 # only take into account for removes between wc and target
2692 clean |= dsremoved - removed
2692 clean |= dsremoved - removed
2693 dsremoved &= removed
2693 dsremoved &= removed
2694 # distinct between dirstate remove and other
2694 # distinct between dirstate remove and other
2695 removed -= dsremoved
2695 removed -= dsremoved
2696
2696
2697 modadded = added & dsmodified
2697 modadded = added & dsmodified
2698 added -= modadded
2698 added -= modadded
2699
2699
2700 # tell newly modified apart.
2700 # tell newly modified apart.
2701 dsmodified &= modified
2701 dsmodified &= modified
2702 dsmodified |= modified & dsadded # dirstate added may need backup
2702 dsmodified |= modified & dsadded # dirstate added may need backup
2703 modified -= dsmodified
2703 modified -= dsmodified
2704
2704
2705 # We need to wait for some post-processing to update this set
2705 # We need to wait for some post-processing to update this set
2706 # before making the distinction. The dirstate will be used for
2706 # before making the distinction. The dirstate will be used for
2707 # that purpose.
2707 # that purpose.
2708 dsadded = added
2708 dsadded = added
2709
2709
2710 # in case of merge, files that are actually added can be reported as
2710 # in case of merge, files that are actually added can be reported as
2711 # modified, we need to post process the result
2711 # modified, we need to post process the result
2712 if p2 != nullid:
2712 if p2 != nullid:
2713 mergeadd = set(dsmodified)
2713 mergeadd = set(dsmodified)
2714 for path in dsmodified:
2714 for path in dsmodified:
2715 if path in mf:
2715 if path in mf:
2716 mergeadd.remove(path)
2716 mergeadd.remove(path)
2717 dsadded |= mergeadd
2717 dsadded |= mergeadd
2718 dsmodified -= mergeadd
2718 dsmodified -= mergeadd
2719
2719
2720 # if f is a rename, update `names` to also revert the source
2720 # if f is a rename, update `names` to also revert the source
2721 cwd = repo.getcwd()
2721 cwd = repo.getcwd()
2722 for f in localchanges:
2722 for f in localchanges:
2723 src = repo.dirstate.copied(f)
2723 src = repo.dirstate.copied(f)
2724 # XXX should we check for rename down to target node?
2724 # XXX should we check for rename down to target node?
2725 if src and src not in names and repo.dirstate[src] == 'r':
2725 if src and src not in names and repo.dirstate[src] == 'r':
2726 dsremoved.add(src)
2726 dsremoved.add(src)
2727 names[src] = (repo.pathto(src, cwd), True)
2727 names[src] = (repo.pathto(src, cwd), True)
2728
2728
2729 # determine the exact nature of the deleted changesets
2729 # determine the exact nature of the deleted changesets
2730 deladded = set(_deleted)
2730 deladded = set(_deleted)
2731 for path in _deleted:
2731 for path in _deleted:
2732 if path in mf:
2732 if path in mf:
2733 deladded.remove(path)
2733 deladded.remove(path)
2734 deleted = _deleted - deladded
2734 deleted = _deleted - deladded
2735
2735
2736 # distinguish between file to forget and the other
2736 # distinguish between file to forget and the other
2737 added = set()
2737 added = set()
2738 for abs in dsadded:
2738 for abs in dsadded:
2739 if repo.dirstate[abs] != 'a':
2739 if repo.dirstate[abs] != 'a':
2740 added.add(abs)
2740 added.add(abs)
2741 dsadded -= added
2741 dsadded -= added
2742
2742
2743 for abs in deladded:
2743 for abs in deladded:
2744 if repo.dirstate[abs] == 'a':
2744 if repo.dirstate[abs] == 'a':
2745 dsadded.add(abs)
2745 dsadded.add(abs)
2746 deladded -= dsadded
2746 deladded -= dsadded
2747
2747
2748 # For files marked as removed, we check if an unknown file is present at
2748 # For files marked as removed, we check if an unknown file is present at
2749 # the same path. If a such file exists it may need to be backed up.
2749 # the same path. If a such file exists it may need to be backed up.
2750 # Making the distinction at this stage helps have simpler backup
2750 # Making the distinction at this stage helps have simpler backup
2751 # logic.
2751 # logic.
2752 removunk = set()
2752 removunk = set()
2753 for abs in removed:
2753 for abs in removed:
2754 target = repo.wjoin(abs)
2754 target = repo.wjoin(abs)
2755 if os.path.lexists(target):
2755 if os.path.lexists(target):
2756 removunk.add(abs)
2756 removunk.add(abs)
2757 removed -= removunk
2757 removed -= removunk
2758
2758
2759 dsremovunk = set()
2759 dsremovunk = set()
2760 for abs in dsremoved:
2760 for abs in dsremoved:
2761 target = repo.wjoin(abs)
2761 target = repo.wjoin(abs)
2762 if os.path.lexists(target):
2762 if os.path.lexists(target):
2763 dsremovunk.add(abs)
2763 dsremovunk.add(abs)
2764 dsremoved -= dsremovunk
2764 dsremoved -= dsremovunk
2765
2765
2766 # action to be actually performed by revert
2766 # action to be actually performed by revert
2767 # (<list of file>, message>) tuple
2767 # (<list of file>, message>) tuple
2768 actions = {'revert': ([], _('reverting %s\n')),
2768 actions = {'revert': ([], _('reverting %s\n')),
2769 'add': ([], _('adding %s\n')),
2769 'add': ([], _('adding %s\n')),
2770 'remove': ([], _('removing %s\n')),
2770 'remove': ([], _('removing %s\n')),
2771 'drop': ([], _('removing %s\n')),
2771 'drop': ([], _('removing %s\n')),
2772 'forget': ([], _('forgetting %s\n')),
2772 'forget': ([], _('forgetting %s\n')),
2773 'undelete': ([], _('undeleting %s\n')),
2773 'undelete': ([], _('undeleting %s\n')),
2774 'noop': (None, _('no changes needed to %s\n')),
2774 'noop': (None, _('no changes needed to %s\n')),
2775 'unknown': (None, _('file not managed: %s\n')),
2775 'unknown': (None, _('file not managed: %s\n')),
2776 }
2776 }
2777
2777
2778 # "constant" that convey the backup strategy.
2778 # "constant" that convey the backup strategy.
2779 # All set to `discard` if `no-backup` is set do avoid checking
2779 # All set to `discard` if `no-backup` is set do avoid checking
2780 # no_backup lower in the code.
2780 # no_backup lower in the code.
2781 # These values are ordered for comparison purposes
2781 # These values are ordered for comparison purposes
2782 backupinteractive = 3 # do backup if interactively modified
2782 backupinteractive = 3 # do backup if interactively modified
2783 backup = 2 # unconditionally do backup
2783 backup = 2 # unconditionally do backup
2784 check = 1 # check if the existing file differs from target
2784 check = 1 # check if the existing file differs from target
2785 discard = 0 # never do backup
2785 discard = 0 # never do backup
2786 if opts.get('no_backup'):
2786 if opts.get('no_backup'):
2787 backupinteractive = backup = check = discard
2787 backupinteractive = backup = check = discard
2788 if interactive:
2788 if interactive:
2789 dsmodifiedbackup = backupinteractive
2789 dsmodifiedbackup = backupinteractive
2790 else:
2790 else:
2791 dsmodifiedbackup = backup
2791 dsmodifiedbackup = backup
2792 tobackup = set()
2792 tobackup = set()
2793
2793
2794 backupanddel = actions['remove']
2794 backupanddel = actions['remove']
2795 if not opts.get('no_backup'):
2795 if not opts.get('no_backup'):
2796 backupanddel = actions['drop']
2796 backupanddel = actions['drop']
2797
2797
2798 disptable = (
2798 disptable = (
2799 # dispatch table:
2799 # dispatch table:
2800 # file state
2800 # file state
2801 # action
2801 # action
2802 # make backup
2802 # make backup
2803
2803
2804 ## Sets that results that will change file on disk
2804 ## Sets that results that will change file on disk
2805 # Modified compared to target, no local change
2805 # Modified compared to target, no local change
2806 (modified, actions['revert'], discard),
2806 (modified, actions['revert'], discard),
2807 # Modified compared to target, but local file is deleted
2807 # Modified compared to target, but local file is deleted
2808 (deleted, actions['revert'], discard),
2808 (deleted, actions['revert'], discard),
2809 # Modified compared to target, local change
2809 # Modified compared to target, local change
2810 (dsmodified, actions['revert'], dsmodifiedbackup),
2810 (dsmodified, actions['revert'], dsmodifiedbackup),
2811 # Added since target
2811 # Added since target
2812 (added, actions['remove'], discard),
2812 (added, actions['remove'], discard),
2813 # Added in working directory
2813 # Added in working directory
2814 (dsadded, actions['forget'], discard),
2814 (dsadded, actions['forget'], discard),
2815 # Added since target, have local modification
2815 # Added since target, have local modification
2816 (modadded, backupanddel, backup),
2816 (modadded, backupanddel, backup),
2817 # Added since target but file is missing in working directory
2817 # Added since target but file is missing in working directory
2818 (deladded, actions['drop'], discard),
2818 (deladded, actions['drop'], discard),
2819 # Removed since target, before working copy parent
2819 # Removed since target, before working copy parent
2820 (removed, actions['add'], discard),
2820 (removed, actions['add'], discard),
2821 # Same as `removed` but an unknown file exists at the same path
2821 # Same as `removed` but an unknown file exists at the same path
2822 (removunk, actions['add'], check),
2822 (removunk, actions['add'], check),
2823 # Removed since targe, marked as such in working copy parent
2823 # Removed since targe, marked as such in working copy parent
2824 (dsremoved, actions['undelete'], discard),
2824 (dsremoved, actions['undelete'], discard),
2825 # Same as `dsremoved` but an unknown file exists at the same path
2825 # Same as `dsremoved` but an unknown file exists at the same path
2826 (dsremovunk, actions['undelete'], check),
2826 (dsremovunk, actions['undelete'], check),
2827 ## the following sets does not result in any file changes
2827 ## the following sets does not result in any file changes
2828 # File with no modification
2828 # File with no modification
2829 (clean, actions['noop'], discard),
2829 (clean, actions['noop'], discard),
2830 # Existing file, not tracked anywhere
2830 # Existing file, not tracked anywhere
2831 (unknown, actions['unknown'], discard),
2831 (unknown, actions['unknown'], discard),
2832 )
2832 )
2833
2833
2834 for abs, (rel, exact) in sorted(names.items()):
2834 for abs, (rel, exact) in sorted(names.items()):
2835 # target file to be touch on disk (relative to cwd)
2835 # target file to be touch on disk (relative to cwd)
2836 target = repo.wjoin(abs)
2836 target = repo.wjoin(abs)
2837 # search the entry in the dispatch table.
2837 # search the entry in the dispatch table.
2838 # if the file is in any of these sets, it was touched in the working
2838 # if the file is in any of these sets, it was touched in the working
2839 # directory parent and we are sure it needs to be reverted.
2839 # directory parent and we are sure it needs to be reverted.
2840 for table, (xlist, msg), dobackup in disptable:
2840 for table, (xlist, msg), dobackup in disptable:
2841 if abs not in table:
2841 if abs not in table:
2842 continue
2842 continue
2843 if xlist is not None:
2843 if xlist is not None:
2844 xlist.append(abs)
2844 xlist.append(abs)
2845 if dobackup:
2845 if dobackup:
2846 # If in interactive mode, don't automatically create
2846 # If in interactive mode, don't automatically create
2847 # .orig files (issue4793)
2847 # .orig files (issue4793)
2848 if dobackup == backupinteractive:
2848 if dobackup == backupinteractive:
2849 tobackup.add(abs)
2849 tobackup.add(abs)
2850 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2850 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2851 bakname = scmutil.origpath(ui, repo, rel)
2851 bakname = scmutil.origpath(ui, repo, rel)
2852 ui.note(_('saving current version of %s as %s\n') %
2852 ui.note(_('saving current version of %s as %s\n') %
2853 (rel, bakname))
2853 (rel, bakname))
2854 if not opts.get('dry_run'):
2854 if not opts.get('dry_run'):
2855 if interactive:
2855 if interactive:
2856 util.copyfile(target, bakname)
2856 util.copyfile(target, bakname)
2857 else:
2857 else:
2858 util.rename(target, bakname)
2858 util.rename(target, bakname)
2859 if ui.verbose or not exact:
2859 if ui.verbose or not exact:
2860 if not isinstance(msg, bytes):
2860 if not isinstance(msg, bytes):
2861 msg = msg(abs)
2861 msg = msg(abs)
2862 ui.status(msg % rel)
2862 ui.status(msg % rel)
2863 elif exact:
2863 elif exact:
2864 ui.warn(msg % rel)
2864 ui.warn(msg % rel)
2865 break
2865 break
2866
2866
2867 if not opts.get('dry_run'):
2867 if not opts.get('dry_run'):
2868 needdata = ('revert', 'add', 'undelete')
2868 needdata = ('revert', 'add', 'undelete')
2869 if _revertprefetch is not _revertprefetchstub:
2869 if _revertprefetch is not _revertprefetchstub:
2870 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, use "
2870 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, use "
2871 "'cmdutil._prefetchfiles'", '4.6', stacklevel=1)
2871 "'cmdutil._prefetchfiles'", '4.6', stacklevel=1)
2872 _revertprefetch(repo, ctx,
2872 _revertprefetch(repo, ctx,
2873 *[actions[name][0] for name in needdata])
2873 *[actions[name][0] for name in needdata])
2874 oplist = [actions[name][0] for name in needdata]
2874 oplist = [actions[name][0] for name in needdata]
2875 _prefetchfiles(repo, ctx,
2875 _prefetchfiles(repo, ctx,
2876 [f for sublist in oplist for f in sublist])
2876 [f for sublist in oplist for f in sublist])
2877 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2877 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2878
2878
2879 if targetsubs:
2879 if targetsubs:
2880 # Revert the subrepos on the revert list
2880 # Revert the subrepos on the revert list
2881 for sub in targetsubs:
2881 for sub in targetsubs:
2882 try:
2882 try:
2883 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2883 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2884 **pycompat.strkwargs(opts))
2884 **pycompat.strkwargs(opts))
2885 except KeyError:
2885 except KeyError:
2886 raise error.Abort("subrepository '%s' does not exist in %s!"
2886 raise error.Abort("subrepository '%s' does not exist in %s!"
2887 % (sub, short(ctx.node())))
2887 % (sub, short(ctx.node())))
2888
2888
2889 def _revertprefetchstub(repo, ctx, *files):
2889 def _revertprefetchstub(repo, ctx, *files):
2890 """Stub method for detecting extension wrapping of _revertprefetch(), to
2890 """Stub method for detecting extension wrapping of _revertprefetch(), to
2891 issue a deprecation warning."""
2891 issue a deprecation warning."""
2892
2892
2893 _revertprefetch = _revertprefetchstub
2893 _revertprefetch = _revertprefetchstub
2894
2894
2895 def _prefetchfiles(repo, ctx, files):
2895 def _prefetchfiles(repo, ctx, files):
2896 """Let extensions changing the storage layer prefetch content for any non
2896 """Let extensions changing the storage layer prefetch content for any non
2897 merge based command."""
2897 merge based command."""
2898
2898
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    'actions' maps action names ('revert', 'add', 'remove', 'drop',
    'forget', 'undelete', ...) to ([files], message) pairs built by the
    revert dispatch table; 'parents' is the working directory's
    (parent, p2) node pair and 'ctx' the context being reverted to.

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    # files the user chooses to keep (interactive mode) are appended to
    # excluded_files, which the matcher built below reads by reference
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write the target revision's data and flags for f to disk
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # best-effort removal from disk, then drop from the dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                # user kept the file: exclude it from the interactive
                # hunk selection below
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        # reverting to the parent discards local hunks; reverting to any
        # other revision applies hunks taken from that revision instead
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                # only back up each file once, however many hunks touch it
                tobackup.remove(abs)
            c.write(fp)
        # fp.tell() is nonzero iff at least one hunk was selected
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for everything we (re)wrote
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3035
3035
class command(registrar.command):
    """deprecated: used registrar.command instead"""

    def _doregister(self, func, name, *args, **kwargs):
        # Tag the function so extensions.py can emit a deprecation
        # warning for commands registered through this shim.
        func._deprecatedregistrar = True
        return super(command, self)._doregister(func, name, *args, **kwargs)
3041
3041
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
# consumed by checkunfinished()/clearunfinished() below
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3070
3070
def checkunfinished(repo, commit=False):
    '''Abort if a multistep operation such as graft is in progress.

    Entries in unfinishedstates that allow committing are skipped when
    ``commit`` is True. It's probably good to check this right before
    bailifchanged().
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not (commit and allowcommit) and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3081
3081
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: any non-clearable state present means we must abort
    # before touching anything
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: remove the clearable state files
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3092
3092
# (state file, command needed to finish the interrupted operation)
# pairs consulted by howtocontinue() after 'hg resolve' completes
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3097
3097
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _("continue: %s")
    # an interrupted multistep operation takes priority over a plain
    # dirty working directory
    for statefile, cmdhint in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmdhint, True
    wctx = repo[None]
    if wctx.dirty(missing=True, merge=False, branch=False):
        return contmsg % _("hg commit"), False
    return None, None
3115
3115
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3130
3130
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only surface the continue message as a hint for real tasks
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
3145
3145
class changeset_printer(logcmdutil.changesetprinter):
    """Deprecated alias for logcmdutil.changesetprinter.

    Kept so third-party code referencing cmdutil.changeset_printer keeps
    working; warns and delegates to the new class.
    """

    def __init__(self, ui, *args, **kwargs):
        # BUG FIX: the message previously pointed users at
        # 'logcmdutil.logcmdutil', which does not exist; the actual
        # replacement is logcmdutil.changesetprinter (the base class).
        msg = ("'cmdutil.changeset_printer' is deprecated, "
               "use 'logcmdutil.changesetprinter'")
        ui.deprecwarn(msg, "4.6")
        super(changeset_printer, self).__init__(ui, *args, **kwargs)
3153
3153
def displaygraph(ui, *args, **kwargs):
    """Deprecated wrapper: warns, then forwards to
    logcmdutil.displaygraph()."""
    ui.deprecwarn("'cmdutil.displaygraph' is deprecated, "
                  "use 'logcmdutil.displaygraph'", "4.6")
    return logcmdutil.displaygraph(ui, *args, **kwargs)
3159
3159
def show_changeset(ui, *args, **kwargs):
    """Deprecated wrapper: warns, then forwards to
    logcmdutil.changesetdisplayer()."""
    ui.deprecwarn("'cmdutil.show_changeset' is deprecated, "
                  "use 'logcmdutil.changesetdisplayer'", "4.6")
    return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
General Comments 0
You need to be logged in to leave comments. Login now